repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/google-cloud-java
35,023
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListAssetsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/warehouse.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Response message for ListAssets. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListAssetsResponse} */ public final class ListAssetsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListAssetsResponse) ListAssetsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListAssetsResponse.newBuilder() to construct. 
private ListAssetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListAssetsResponse() { assets_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListAssetsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAssetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAssetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListAssetsResponse.class, com.google.cloud.visionai.v1.ListAssetsResponse.Builder.class); } public static final int ASSETS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.visionai.v1.Asset> assets_; /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.visionai.v1.Asset> getAssetsList() { return assets_; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.visionai.v1.AssetOrBuilder> getAssetsOrBuilderList() { return assets_; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ @java.lang.Override public int getAssetsCount() { return assets_.size(); } /** * * * <pre> * The assets from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.Asset getAssets(int index) { return assets_.get(index); } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.AssetOrBuilder getAssetsOrBuilder(int index) { return assets_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < assets_.size(); i++) { output.writeMessage(1, assets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < assets_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, assets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ListAssetsResponse)) { return super.equals(obj); } com.google.cloud.visionai.v1.ListAssetsResponse other = (com.google.cloud.visionai.v1.ListAssetsResponse) obj; if (!getAssetsList().equals(other.getAssetsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAssetsCount() > 0) { hash = (37 * hash) + ASSETS_FIELD_NUMBER; hash = (53 * hash) + getAssetsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.visionai.v1.ListAssetsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAssetsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.visionai.v1.ListAssetsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListAssets. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListAssetsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListAssetsResponse) com.google.cloud.visionai.v1.ListAssetsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAssetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAssetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListAssetsResponse.class, com.google.cloud.visionai.v1.ListAssetsResponse.Builder.class); } // Construct using com.google.cloud.visionai.v1.ListAssetsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (assetsBuilder_ == null) { assets_ = java.util.Collections.emptyList(); } else { assets_ = null; assetsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAssetsResponse_descriptor; } @java.lang.Override public 
com.google.cloud.visionai.v1.ListAssetsResponse getDefaultInstanceForType() { return com.google.cloud.visionai.v1.ListAssetsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ListAssetsResponse build() { com.google.cloud.visionai.v1.ListAssetsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.ListAssetsResponse buildPartial() { com.google.cloud.visionai.v1.ListAssetsResponse result = new com.google.cloud.visionai.v1.ListAssetsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.visionai.v1.ListAssetsResponse result) { if (assetsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { assets_ = java.util.Collections.unmodifiableList(assets_); bitField0_ = (bitField0_ & ~0x00000001); } result.assets_ = assets_; } else { result.assets_ = assetsBuilder_.build(); } } private void buildPartial0(com.google.cloud.visionai.v1.ListAssetsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } 
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.ListAssetsResponse) { return mergeFrom((com.google.cloud.visionai.v1.ListAssetsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.ListAssetsResponse other) { if (other == com.google.cloud.visionai.v1.ListAssetsResponse.getDefaultInstance()) return this; if (assetsBuilder_ == null) { if (!other.assets_.isEmpty()) { if (assets_.isEmpty()) { assets_ = other.assets_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAssetsIsMutable(); assets_.addAll(other.assets_); } onChanged(); } } else { if (!other.assets_.isEmpty()) { if (assetsBuilder_.isEmpty()) { assetsBuilder_.dispose(); assetsBuilder_ = null; assets_ = other.assets_; bitField0_ = (bitField0_ & ~0x00000001); assetsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAssetsFieldBuilder() : null; } else { assetsBuilder_.addAllMessages(other.assets_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.visionai.v1.Asset m = input.readMessage( com.google.cloud.visionai.v1.Asset.parser(), extensionRegistry); if (assetsBuilder_ == null) { ensureAssetsIsMutable(); assets_.add(m); } else { assetsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.visionai.v1.Asset> assets_ = java.util.Collections.emptyList(); private void ensureAssetsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { assets_ = new java.util.ArrayList<com.google.cloud.visionai.v1.Asset>(assets_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Asset, com.google.cloud.visionai.v1.Asset.Builder, com.google.cloud.visionai.v1.AssetOrBuilder> assetsBuilder_; /** * * * <pre> * The assets from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.Asset> getAssetsList() { if (assetsBuilder_ == null) { return java.util.Collections.unmodifiableList(assets_); } else { return assetsBuilder_.getMessageList(); } } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public int getAssetsCount() { if (assetsBuilder_ == null) { return assets_.size(); } else { return assetsBuilder_.getCount(); } } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public com.google.cloud.visionai.v1.Asset getAssets(int index) { if (assetsBuilder_ == null) { return assets_.get(index); } else { return assetsBuilder_.getMessage(index); } } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder setAssets(int index, com.google.cloud.visionai.v1.Asset value) { if (assetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAssetsIsMutable(); assets_.set(index, value); onChanged(); } else { assetsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder setAssets( int index, com.google.cloud.visionai.v1.Asset.Builder builderForValue) { if (assetsBuilder_ == null) { ensureAssetsIsMutable(); assets_.set(index, builderForValue.build()); onChanged(); } else { assetsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The assets from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder addAssets(com.google.cloud.visionai.v1.Asset value) { if (assetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAssetsIsMutable(); assets_.add(value); onChanged(); } else { assetsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder addAssets(int index, com.google.cloud.visionai.v1.Asset value) { if (assetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAssetsIsMutable(); assets_.add(index, value); onChanged(); } else { assetsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder addAssets(com.google.cloud.visionai.v1.Asset.Builder builderForValue) { if (assetsBuilder_ == null) { ensureAssetsIsMutable(); assets_.add(builderForValue.build()); onChanged(); } else { assetsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder addAssets( int index, com.google.cloud.visionai.v1.Asset.Builder builderForValue) { if (assetsBuilder_ == null) { ensureAssetsIsMutable(); assets_.add(index, builderForValue.build()); onChanged(); } else { assetsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder addAllAssets( java.lang.Iterable<? 
extends com.google.cloud.visionai.v1.Asset> values) { if (assetsBuilder_ == null) { ensureAssetsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, assets_); onChanged(); } else { assetsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder clearAssets() { if (assetsBuilder_ == null) { assets_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { assetsBuilder_.clear(); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public Builder removeAssets(int index) { if (assetsBuilder_ == null) { ensureAssetsIsMutable(); assets_.remove(index); onChanged(); } else { assetsBuilder_.remove(index); } return this; } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public com.google.cloud.visionai.v1.Asset.Builder getAssetsBuilder(int index) { return getAssetsFieldBuilder().getBuilder(index); } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public com.google.cloud.visionai.v1.AssetOrBuilder getAssetsOrBuilder(int index) { if (assetsBuilder_ == null) { return assets_.get(index); } else { return assetsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public java.util.List<? extends com.google.cloud.visionai.v1.AssetOrBuilder> getAssetsOrBuilderList() { if (assetsBuilder_ != null) { return assetsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(assets_); } } /** * * * <pre> * The assets from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public com.google.cloud.visionai.v1.Asset.Builder addAssetsBuilder() { return getAssetsFieldBuilder() .addBuilder(com.google.cloud.visionai.v1.Asset.getDefaultInstance()); } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public com.google.cloud.visionai.v1.Asset.Builder addAssetsBuilder(int index) { return getAssetsFieldBuilder() .addBuilder(index, com.google.cloud.visionai.v1.Asset.getDefaultInstance()); } /** * * * <pre> * The assets from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Asset assets = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.Asset.Builder> getAssetsBuilderList() { return getAssetsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Asset, com.google.cloud.visionai.v1.Asset.Builder, com.google.cloud.visionai.v1.AssetOrBuilder> getAssetsFieldBuilder() { if (assetsBuilder_ == null) { assetsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Asset, com.google.cloud.visionai.v1.Asset.Builder, com.google.cloud.visionai.v1.AssetOrBuilder>( assets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); assets_ = null; } return assetsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListAssetsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListAssetsResponse) private static final com.google.cloud.visionai.v1.ListAssetsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListAssetsResponse(); } public static com.google.cloud.visionai.v1.ListAssetsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListAssetsResponse> PARSER = new com.google.protobuf.AbstractParser<ListAssetsResponse>() { @java.lang.Override public ListAssetsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListAssetsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListAssetsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.ListAssetsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,038
java-run/proto-google-cloud-run-v2/src/main/java/com/google/cloud/run/v2/ListRevisionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/run/v2/revision.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.run.v2; /** * * * <pre> * Response message containing a list of Revisions. * </pre> * * Protobuf type {@code google.cloud.run.v2.ListRevisionsResponse} */ public final class ListRevisionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.run.v2.ListRevisionsResponse) ListRevisionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRevisionsResponse.newBuilder() to construct. 
private ListRevisionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRevisionsResponse() { revisions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRevisionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.run.v2.RevisionProto .internal_static_google_cloud_run_v2_ListRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.run.v2.RevisionProto .internal_static_google_cloud_run_v2_ListRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.run.v2.ListRevisionsResponse.class, com.google.cloud.run.v2.ListRevisionsResponse.Builder.class); } public static final int REVISIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.run.v2.Revision> revisions_; /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.run.v2.Revision> getRevisionsList() { return revisions_; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.run.v2.RevisionOrBuilder> getRevisionsOrBuilderList() { return revisions_; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ @java.lang.Override public int getRevisionsCount() { return revisions_.size(); } /** * * * <pre> * The resulting list of Revisions. 
* </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ @java.lang.Override public com.google.cloud.run.v2.Revision getRevisions(int index) { return revisions_.get(index); } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ @java.lang.Override public com.google.cloud.run.v2.RevisionOrBuilder getRevisionsOrBuilder(int index) { return revisions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token indicating there are more items than page_size. Use it in the next * ListRevisions request to continue. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token indicating there are more items than page_size. Use it in the next * ListRevisions request to continue. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < revisions_.size(); i++) { output.writeMessage(1, revisions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < revisions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, revisions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.run.v2.ListRevisionsResponse)) { return super.equals(obj); } com.google.cloud.run.v2.ListRevisionsResponse other = (com.google.cloud.run.v2.ListRevisionsResponse) obj; if (!getRevisionsList().equals(other.getRevisionsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRevisionsCount() > 0) { hash = (37 * hash) + REVISIONS_FIELD_NUMBER; hash = (53 * hash) + getRevisionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.run.v2.ListRevisionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.run.v2.ListRevisionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.run.v2.ListRevisionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.run.v2.ListRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.run.v2.ListRevisionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message containing a list of Revisions. * </pre> * * Protobuf type {@code google.cloud.run.v2.ListRevisionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.run.v2.ListRevisionsResponse) com.google.cloud.run.v2.ListRevisionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.run.v2.RevisionProto .internal_static_google_cloud_run_v2_ListRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.run.v2.RevisionProto .internal_static_google_cloud_run_v2_ListRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.run.v2.ListRevisionsResponse.class, com.google.cloud.run.v2.ListRevisionsResponse.Builder.class); } // Construct using com.google.cloud.run.v2.ListRevisionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (revisionsBuilder_ == null) { revisions_ = java.util.Collections.emptyList(); } else { revisions_ = null; revisionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.run.v2.RevisionProto .internal_static_google_cloud_run_v2_ListRevisionsResponse_descriptor; } @java.lang.Override public com.google.cloud.run.v2.ListRevisionsResponse 
getDefaultInstanceForType() { return com.google.cloud.run.v2.ListRevisionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.run.v2.ListRevisionsResponse build() { com.google.cloud.run.v2.ListRevisionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.run.v2.ListRevisionsResponse buildPartial() { com.google.cloud.run.v2.ListRevisionsResponse result = new com.google.cloud.run.v2.ListRevisionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloud.run.v2.ListRevisionsResponse result) { if (revisionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { revisions_ = java.util.Collections.unmodifiableList(revisions_); bitField0_ = (bitField0_ & ~0x00000001); } result.revisions_ = revisions_; } else { result.revisions_ = revisionsBuilder_.build(); } } private void buildPartial0(com.google.cloud.run.v2.ListRevisionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder 
addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.run.v2.ListRevisionsResponse) { return mergeFrom((com.google.cloud.run.v2.ListRevisionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.run.v2.ListRevisionsResponse other) { if (other == com.google.cloud.run.v2.ListRevisionsResponse.getDefaultInstance()) return this; if (revisionsBuilder_ == null) { if (!other.revisions_.isEmpty()) { if (revisions_.isEmpty()) { revisions_ = other.revisions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRevisionsIsMutable(); revisions_.addAll(other.revisions_); } onChanged(); } } else { if (!other.revisions_.isEmpty()) { if (revisionsBuilder_.isEmpty()) { revisionsBuilder_.dispose(); revisionsBuilder_ = null; revisions_ = other.revisions_; bitField0_ = (bitField0_ & ~0x00000001); revisionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRevisionsFieldBuilder() : null; } else { revisionsBuilder_.addAllMessages(other.revisions_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.run.v2.Revision m = input.readMessage(com.google.cloud.run.v2.Revision.parser(), extensionRegistry); if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.add(m); } else { revisionsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.run.v2.Revision> revisions_ = java.util.Collections.emptyList(); private void ensureRevisionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { revisions_ = new java.util.ArrayList<com.google.cloud.run.v2.Revision>(revisions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.run.v2.Revision, com.google.cloud.run.v2.Revision.Builder, com.google.cloud.run.v2.RevisionOrBuilder> revisionsBuilder_; /** * * * <pre> * The resulting list 
of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public java.util.List<com.google.cloud.run.v2.Revision> getRevisionsList() { if (revisionsBuilder_ == null) { return java.util.Collections.unmodifiableList(revisions_); } else { return revisionsBuilder_.getMessageList(); } } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public int getRevisionsCount() { if (revisionsBuilder_ == null) { return revisions_.size(); } else { return revisionsBuilder_.getCount(); } } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public com.google.cloud.run.v2.Revision getRevisions(int index) { if (revisionsBuilder_ == null) { return revisions_.get(index); } else { return revisionsBuilder_.getMessage(index); } } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder setRevisions(int index, com.google.cloud.run.v2.Revision value) { if (revisionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRevisionsIsMutable(); revisions_.set(index, value); onChanged(); } else { revisionsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder setRevisions( int index, com.google.cloud.run.v2.Revision.Builder builderForValue) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.set(index, builderForValue.build()); onChanged(); } else { revisionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The resulting list of Revisions. 
* </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder addRevisions(com.google.cloud.run.v2.Revision value) { if (revisionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRevisionsIsMutable(); revisions_.add(value); onChanged(); } else { revisionsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder addRevisions(int index, com.google.cloud.run.v2.Revision value) { if (revisionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRevisionsIsMutable(); revisions_.add(index, value); onChanged(); } else { revisionsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder addRevisions(com.google.cloud.run.v2.Revision.Builder builderForValue) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.add(builderForValue.build()); onChanged(); } else { revisionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder addRevisions( int index, com.google.cloud.run.v2.Revision.Builder builderForValue) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.add(index, builderForValue.build()); onChanged(); } else { revisionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder addAllRevisions( java.lang.Iterable<? 
extends com.google.cloud.run.v2.Revision> values) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, revisions_); onChanged(); } else { revisionsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder clearRevisions() { if (revisionsBuilder_ == null) { revisions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { revisionsBuilder_.clear(); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public Builder removeRevisions(int index) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.remove(index); onChanged(); } else { revisionsBuilder_.remove(index); } return this; } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public com.google.cloud.run.v2.Revision.Builder getRevisionsBuilder(int index) { return getRevisionsFieldBuilder().getBuilder(index); } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public com.google.cloud.run.v2.RevisionOrBuilder getRevisionsOrBuilder(int index) { if (revisionsBuilder_ == null) { return revisions_.get(index); } else { return revisionsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public java.util.List<? 
extends com.google.cloud.run.v2.RevisionOrBuilder> getRevisionsOrBuilderList() { if (revisionsBuilder_ != null) { return revisionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(revisions_); } } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public com.google.cloud.run.v2.Revision.Builder addRevisionsBuilder() { return getRevisionsFieldBuilder() .addBuilder(com.google.cloud.run.v2.Revision.getDefaultInstance()); } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public com.google.cloud.run.v2.Revision.Builder addRevisionsBuilder(int index) { return getRevisionsFieldBuilder() .addBuilder(index, com.google.cloud.run.v2.Revision.getDefaultInstance()); } /** * * * <pre> * The resulting list of Revisions. * </pre> * * <code>repeated .google.cloud.run.v2.Revision revisions = 1;</code> */ public java.util.List<com.google.cloud.run.v2.Revision.Builder> getRevisionsBuilderList() { return getRevisionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.run.v2.Revision, com.google.cloud.run.v2.Revision.Builder, com.google.cloud.run.v2.RevisionOrBuilder> getRevisionsFieldBuilder() { if (revisionsBuilder_ == null) { revisionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.run.v2.Revision, com.google.cloud.run.v2.Revision.Builder, com.google.cloud.run.v2.RevisionOrBuilder>( revisions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); revisions_ = null; } return revisionsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token indicating there are more items than page_size. Use it in the next * ListRevisions request to continue. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token indicating there are more items than page_size. Use it in the next * ListRevisions request to continue. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token indicating there are more items than page_size. Use it in the next * ListRevisions request to continue. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token indicating there are more items than page_size. Use it in the next * ListRevisions request to continue. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token indicating there are more items than page_size. Use it in the next * ListRevisions request to continue. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.run.v2.ListRevisionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.run.v2.ListRevisionsResponse) private static final com.google.cloud.run.v2.ListRevisionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.run.v2.ListRevisionsResponse(); } public static com.google.cloud.run.v2.ListRevisionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRevisionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListRevisionsResponse>() { @java.lang.Override public ListRevisionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListRevisionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRevisionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.run.v2.ListRevisionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-api-java-client-services
35,312
clients/google-api-services-fcm/v1/1.31.0/com/google/api/services/fcm/v1/model/AndroidNotification.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.fcm.v1.model; /** * Notification to send to android devices. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Firebase Cloud Messaging API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class AndroidNotification extends com.google.api.client.json.GenericJson { /** * The notification's body text. If present, it will override * google.firebase.fcm.v1.Notification.body. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String body; /** * Variable string values to be used in place of the format specifiers in body_loc_key to use to * localize the body text to the user's current localization. See [Formatting and * Styling](https://goo.gl/MalYE3) for more information. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<java.lang.String> bodyLocArgs; /** * The key to the body string in the app's string resources to use to localize the body text to * the user's current localization. See [String Resources](https://goo.gl/NdFZGI) for more * information. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String bodyLocKey; /** * If set, display notifications delivered to the device will be handled by the app instead of the * proxy. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean bypassProxyNotification; /** * The [notification's channel * id](https://developer.android.com/guide/topics/ui/notifiers/notifications#ManageChannels) (new * in Android O). The app must create a channel with this channel ID before any notification with * this channel ID is received. If you don't send this channel ID in the request, or if the * channel ID provided has not yet been created by the app, FCM uses the channel ID specified in * the app manifest. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String channelId; /** * The action associated with a user click on the notification. If specified, an activity with a * matching intent filter is launched when a user clicks on the notification. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String clickAction; /** * The notification's icon color, expressed in #rrggbb format. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String color; /** * If set to true, use the Android framework's default LED light settings for the notification. * Default values are specified in [config.xml](https://android.googlesource.com/platform/framewor * ks/base/+/master/core/res/res/values/config.xml). 
If `default_light_settings` is set to true * and `light_settings` is also set, the user-specified `light_settings` is used instead of the * default value. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean defaultLightSettings; /** * If set to true, use the Android framework's default sound for the notification. Default values * are specified in [config.xml](https://android.googlesource.com/platform/frameworks/base/+/maste * r/core/res/res/values/config.xml). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean defaultSound; /** * If set to true, use the Android framework's default vibrate pattern for the notification. * Default values are specified in [config.xml](https://android.googlesource.com/platform/framewor * ks/base/+/master/core/res/res/values/config.xml). If `default_vibrate_timings` is set to true * and `vibrate_timings` is also set, the default value is used instead of the user-specified * `vibrate_timings`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean defaultVibrateTimings; /** * Set the time that the event in the notification occurred. Notifications in the panel are sorted * by this time. A point in time is represented using * [protobuf.Timestamp](https://developers.google.com/protocol- * buffers/docs/reference/java/com/google/protobuf/Timestamp). * The value may be {@code null}. */ @com.google.api.client.util.Key private String eventTime; /** * The notification's icon. Sets the notification icon to myicon for drawable resource myicon. If * you don't send this key in the request, FCM displays the launcher icon specified in your app * manifest. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String icon; /** * Contains the URL of an image that is going to be displayed in a notification. If present, it * will override google.firebase.fcm.v1.Notification.image. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String image; /** * Settings to control the notification's LED blinking rate and color if LED is available on the * device. The total blinking time is controlled by the OS. * The value may be {@code null}. */ @com.google.api.client.util.Key private LightSettings lightSettings; /** * Set whether or not this notification is relevant only to the current device. Some notifications * can be bridged to other devices for remote display, such as a Wear OS watch. This hint can be * set to recommend this notification not be bridged. See [Wear OS * guides](https://developer.android.com/training/wearables/notifications/bridger#existing-method- * of-preventing-bridging) * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean localOnly; /** * Sets the number of items this notification represents. May be displayed as a badge count for * launchers that support badging.See [Notification Badge](https://developer.android.com/training * /notify-user/badges). For example, this might be useful if you're using just one notification * to represent multiple new messages but you want the count here to represent the number of total * new messages. If zero or unspecified, systems that support badging use the default, which is to * increment a number displayed on the long-press menu each time a new notification arrives. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer notificationCount; /** * Set the relative priority for this notification. Priority is an indication of how much of the * user's attention should be consumed by this notification. Low-priority notifications may be * hidden from the user in certain situations, while the user might be interrupted for a higher- * priority notification. The effect of setting the same priorities may differ slightly on * different platforms. 
Note this priority differs from `AndroidMessagePriority`. This priority is * processed by the client after the message has been delivered, whereas [AndroidMessagePriority]( * https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#androidmessagepriority * ) is an FCM concept that controls when the message is delivered. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String notificationPriority; /** * The sound to play when the device receives the notification. Supports "default" or the filename * of a sound resource bundled in the app. Sound files must reside in /res/raw/. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String sound; /** * When set to false or unset, the notification is automatically dismissed when the user clicks it * in the panel. When set to true, the notification persists even when the user clicks it. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean sticky; /** * Identifier used to replace existing notifications in the notification drawer. If not specified, * each request creates a new notification. If specified and a notification with the same tag is * already being shown, the new notification replaces the existing one in the notification drawer. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String tag; /** * Sets the "ticker" text, which is sent to accessibility services. Prior to API level 21 * (`Lollipop`), sets the text that is displayed in the status bar when the notification first * arrives. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String ticker; /** * The notification's title. If present, it will override * google.firebase.fcm.v1.Notification.title. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String title; /** * Variable string values to be used in place of the format specifiers in title_loc_key to use to * localize the title text to the user's current localization. See [Formatting and * Styling](https://goo.gl/MalYE3) for more information. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> titleLocArgs; /** * The key to the title string in the app's string resources to use to localize the title text to * the user's current localization. See [String Resources](https://goo.gl/NdFZGI) for more * information. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String titleLocKey; /** * Set the vibration pattern to use. Pass in an array of * [protobuf.Duration](https://developers.google.com/protocol- * buffers/docs/reference/google.protobuf#google.protobuf.Duration) to turn on or off the * vibrator. The first value indicates the `Duration` to wait before turning the vibrator on. The * next value indicates the `Duration` to keep the vibrator on. Subsequent values alternate * between `Duration` to turn the vibrator off and to turn the vibrator on. If `vibrate_timings` * is set and `default_vibrate_timings` is set to `true`, the default value is used instead of the * user-specified `vibrate_timings`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<String> vibrateTimings; /** * Set the [Notification.visibility](https://developer.android.com/reference/android/app/Notificat * ion.html#visibility) of the notification. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String visibility; /** * The notification's body text. If present, it will override * google.firebase.fcm.v1.Notification.body. * @return value or {@code null} for none */ public java.lang.String getBody() { return body; } /** * The notification's body text. 
If present, it will override * google.firebase.fcm.v1.Notification.body. * @param body body or {@code null} for none */ public AndroidNotification setBody(java.lang.String body) { this.body = body; return this; } /** * Variable string values to be used in place of the format specifiers in body_loc_key to use to * localize the body text to the user's current localization. See [Formatting and * Styling](https://goo.gl/MalYE3) for more information. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getBodyLocArgs() { return bodyLocArgs; } /** * Variable string values to be used in place of the format specifiers in body_loc_key to use to * localize the body text to the user's current localization. See [Formatting and * Styling](https://goo.gl/MalYE3) for more information. * @param bodyLocArgs bodyLocArgs or {@code null} for none */ public AndroidNotification setBodyLocArgs(java.util.List<java.lang.String> bodyLocArgs) { this.bodyLocArgs = bodyLocArgs; return this; } /** * The key to the body string in the app's string resources to use to localize the body text to * the user's current localization. See [String Resources](https://goo.gl/NdFZGI) for more * information. * @return value or {@code null} for none */ public java.lang.String getBodyLocKey() { return bodyLocKey; } /** * The key to the body string in the app's string resources to use to localize the body text to * the user's current localization. See [String Resources](https://goo.gl/NdFZGI) for more * information. * @param bodyLocKey bodyLocKey or {@code null} for none */ public AndroidNotification setBodyLocKey(java.lang.String bodyLocKey) { this.bodyLocKey = bodyLocKey; return this; } /** * If set, display notifications delivered to the device will be handled by the app instead of the * proxy. 
* @return value or {@code null} for none */ public java.lang.Boolean getBypassProxyNotification() { return bypassProxyNotification; } /** * If set, display notifications delivered to the device will be handled by the app instead of the * proxy. * @param bypassProxyNotification bypassProxyNotification or {@code null} for none */ public AndroidNotification setBypassProxyNotification(java.lang.Boolean bypassProxyNotification) { this.bypassProxyNotification = bypassProxyNotification; return this; } /** * The [notification's channel * id](https://developer.android.com/guide/topics/ui/notifiers/notifications#ManageChannels) (new * in Android O). The app must create a channel with this channel ID before any notification with * this channel ID is received. If you don't send this channel ID in the request, or if the * channel ID provided has not yet been created by the app, FCM uses the channel ID specified in * the app manifest. * @return value or {@code null} for none */ public java.lang.String getChannelId() { return channelId; } /** * The [notification's channel * id](https://developer.android.com/guide/topics/ui/notifiers/notifications#ManageChannels) (new * in Android O). The app must create a channel with this channel ID before any notification with * this channel ID is received. If you don't send this channel ID in the request, or if the * channel ID provided has not yet been created by the app, FCM uses the channel ID specified in * the app manifest. * @param channelId channelId or {@code null} for none */ public AndroidNotification setChannelId(java.lang.String channelId) { this.channelId = channelId; return this; } /** * The action associated with a user click on the notification. If specified, an activity with a * matching intent filter is launched when a user clicks on the notification. * @return value or {@code null} for none */ public java.lang.String getClickAction() { return clickAction; } /** * The action associated with a user click on the notification. 
If specified, an activity with a * matching intent filter is launched when a user clicks on the notification. * @param clickAction clickAction or {@code null} for none */ public AndroidNotification setClickAction(java.lang.String clickAction) { this.clickAction = clickAction; return this; } /** * The notification's icon color, expressed in #rrggbb format. * @return value or {@code null} for none */ public java.lang.String getColor() { return color; } /** * The notification's icon color, expressed in #rrggbb format. * @param color color or {@code null} for none */ public AndroidNotification setColor(java.lang.String color) { this.color = color; return this; } /** * If set to true, use the Android framework's default LED light settings for the notification. * Default values are specified in [config.xml](https://android.googlesource.com/platform/framewor * ks/base/+/master/core/res/res/values/config.xml). If `default_light_settings` is set to true * and `light_settings` is also set, the user-specified `light_settings` is used instead of the * default value. * @return value or {@code null} for none */ public java.lang.Boolean getDefaultLightSettings() { return defaultLightSettings; } /** * If set to true, use the Android framework's default LED light settings for the notification. * Default values are specified in [config.xml](https://android.googlesource.com/platform/framewor * ks/base/+/master/core/res/res/values/config.xml). If `default_light_settings` is set to true * and `light_settings` is also set, the user-specified `light_settings` is used instead of the * default value. * @param defaultLightSettings defaultLightSettings or {@code null} for none */ public AndroidNotification setDefaultLightSettings(java.lang.Boolean defaultLightSettings) { this.defaultLightSettings = defaultLightSettings; return this; } /** * If set to true, use the Android framework's default sound for the notification. 
Default values * are specified in [config.xml](https://android.googlesource.com/platform/frameworks/base/+/maste * r/core/res/res/values/config.xml). * @return value or {@code null} for none */ public java.lang.Boolean getDefaultSound() { return defaultSound; } /** * If set to true, use the Android framework's default sound for the notification. Default values * are specified in [config.xml](https://android.googlesource.com/platform/frameworks/base/+/maste * r/core/res/res/values/config.xml). * @param defaultSound defaultSound or {@code null} for none */ public AndroidNotification setDefaultSound(java.lang.Boolean defaultSound) { this.defaultSound = defaultSound; return this; } /** * If set to true, use the Android framework's default vibrate pattern for the notification. * Default values are specified in [config.xml](https://android.googlesource.com/platform/framewor * ks/base/+/master/core/res/res/values/config.xml). If `default_vibrate_timings` is set to true * and `vibrate_timings` is also set, the default value is used instead of the user-specified * `vibrate_timings`. * @return value or {@code null} for none */ public java.lang.Boolean getDefaultVibrateTimings() { return defaultVibrateTimings; } /** * If set to true, use the Android framework's default vibrate pattern for the notification. * Default values are specified in [config.xml](https://android.googlesource.com/platform/framewor * ks/base/+/master/core/res/res/values/config.xml). If `default_vibrate_timings` is set to true * and `vibrate_timings` is also set, the default value is used instead of the user-specified * `vibrate_timings`. * @param defaultVibrateTimings defaultVibrateTimings or {@code null} for none */ public AndroidNotification setDefaultVibrateTimings(java.lang.Boolean defaultVibrateTimings) { this.defaultVibrateTimings = defaultVibrateTimings; return this; } /** * Set the time that the event in the notification occurred. Notifications in the panel are sorted * by this time. 
A point in time is represented using * [protobuf.Timestamp](https://developers.google.com/protocol- * buffers/docs/reference/java/com/google/protobuf/Timestamp). * @return value or {@code null} for none */ public String getEventTime() { return eventTime; } /** * Set the time that the event in the notification occurred. Notifications in the panel are sorted * by this time. A point in time is represented using * [protobuf.Timestamp](https://developers.google.com/protocol- * buffers/docs/reference/java/com/google/protobuf/Timestamp). * @param eventTime eventTime or {@code null} for none */ public AndroidNotification setEventTime(String eventTime) { this.eventTime = eventTime; return this; } /** * The notification's icon. Sets the notification icon to myicon for drawable resource myicon. If * you don't send this key in the request, FCM displays the launcher icon specified in your app * manifest. * @return value or {@code null} for none */ public java.lang.String getIcon() { return icon; } /** * The notification's icon. Sets the notification icon to myicon for drawable resource myicon. If * you don't send this key in the request, FCM displays the launcher icon specified in your app * manifest. * @param icon icon or {@code null} for none */ public AndroidNotification setIcon(java.lang.String icon) { this.icon = icon; return this; } /** * Contains the URL of an image that is going to be displayed in a notification. If present, it * will override google.firebase.fcm.v1.Notification.image. * @return value or {@code null} for none */ public java.lang.String getImage() { return image; } /** * Contains the URL of an image that is going to be displayed in a notification. If present, it * will override google.firebase.fcm.v1.Notification.image. 
* @param image image or {@code null} for none */ public AndroidNotification setImage(java.lang.String image) { this.image = image; return this; } /** * Settings to control the notification's LED blinking rate and color if LED is available on the * device. The total blinking time is controlled by the OS. * @return value or {@code null} for none */ public LightSettings getLightSettings() { return lightSettings; } /** * Settings to control the notification's LED blinking rate and color if LED is available on the * device. The total blinking time is controlled by the OS. * @param lightSettings lightSettings or {@code null} for none */ public AndroidNotification setLightSettings(LightSettings lightSettings) { this.lightSettings = lightSettings; return this; } /** * Set whether or not this notification is relevant only to the current device. Some notifications * can be bridged to other devices for remote display, such as a Wear OS watch. This hint can be * set to recommend this notification not be bridged. See [Wear OS * guides](https://developer.android.com/training/wearables/notifications/bridger#existing-method- * of-preventing-bridging) * @return value or {@code null} for none */ public java.lang.Boolean getLocalOnly() { return localOnly; } /** * Set whether or not this notification is relevant only to the current device. Some notifications * can be bridged to other devices for remote display, such as a Wear OS watch. This hint can be * set to recommend this notification not be bridged. See [Wear OS * guides](https://developer.android.com/training/wearables/notifications/bridger#existing-method- * of-preventing-bridging) * @param localOnly localOnly or {@code null} for none */ public AndroidNotification setLocalOnly(java.lang.Boolean localOnly) { this.localOnly = localOnly; return this; } /** * Sets the number of items this notification represents. 
May be displayed as a badge count for * launchers that support badging.See [Notification Badge](https://developer.android.com/training * /notify-user/badges). For example, this might be useful if you're using just one notification * to represent multiple new messages but you want the count here to represent the number of total * new messages. If zero or unspecified, systems that support badging use the default, which is to * increment a number displayed on the long-press menu each time a new notification arrives. * @return value or {@code null} for none */ public java.lang.Integer getNotificationCount() { return notificationCount; } /** * Sets the number of items this notification represents. May be displayed as a badge count for * launchers that support badging.See [Notification Badge](https://developer.android.com/training * /notify-user/badges). For example, this might be useful if you're using just one notification * to represent multiple new messages but you want the count here to represent the number of total * new messages. If zero or unspecified, systems that support badging use the default, which is to * increment a number displayed on the long-press menu each time a new notification arrives. * @param notificationCount notificationCount or {@code null} for none */ public AndroidNotification setNotificationCount(java.lang.Integer notificationCount) { this.notificationCount = notificationCount; return this; } /** * Set the relative priority for this notification. Priority is an indication of how much of the * user's attention should be consumed by this notification. Low-priority notifications may be * hidden from the user in certain situations, while the user might be interrupted for a higher- * priority notification. The effect of setting the same priorities may differ slightly on * different platforms. Note this priority differs from `AndroidMessagePriority`. 
This priority is * processed by the client after the message has been delivered, whereas [AndroidMessagePriority]( * https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#androidmessagepriority * ) is an FCM concept that controls when the message is delivered. * @return value or {@code null} for none */ public java.lang.String getNotificationPriority() { return notificationPriority; } /** * Set the relative priority for this notification. Priority is an indication of how much of the * user's attention should be consumed by this notification. Low-priority notifications may be * hidden from the user in certain situations, while the user might be interrupted for a higher- * priority notification. The effect of setting the same priorities may differ slightly on * different platforms. Note this priority differs from `AndroidMessagePriority`. This priority is * processed by the client after the message has been delivered, whereas [AndroidMessagePriority]( * https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#androidmessagepriority * ) is an FCM concept that controls when the message is delivered. * @param notificationPriority notificationPriority or {@code null} for none */ public AndroidNotification setNotificationPriority(java.lang.String notificationPriority) { this.notificationPriority = notificationPriority; return this; } /** * The sound to play when the device receives the notification. Supports "default" or the filename * of a sound resource bundled in the app. Sound files must reside in /res/raw/. * @return value or {@code null} for none */ public java.lang.String getSound() { return sound; } /** * The sound to play when the device receives the notification. Supports "default" or the filename * of a sound resource bundled in the app. Sound files must reside in /res/raw/. 
* @param sound sound or {@code null} for none */ public AndroidNotification setSound(java.lang.String sound) { this.sound = sound; return this; } /** * When set to false or unset, the notification is automatically dismissed when the user clicks it * in the panel. When set to true, the notification persists even when the user clicks it. * @return value or {@code null} for none */ public java.lang.Boolean getSticky() { return sticky; } /** * When set to false or unset, the notification is automatically dismissed when the user clicks it * in the panel. When set to true, the notification persists even when the user clicks it. * @param sticky sticky or {@code null} for none */ public AndroidNotification setSticky(java.lang.Boolean sticky) { this.sticky = sticky; return this; } /** * Identifier used to replace existing notifications in the notification drawer. If not specified, * each request creates a new notification. If specified and a notification with the same tag is * already being shown, the new notification replaces the existing one in the notification drawer. * @return value or {@code null} for none */ public java.lang.String getTag() { return tag; } /** * Identifier used to replace existing notifications in the notification drawer. If not specified, * each request creates a new notification. If specified and a notification with the same tag is * already being shown, the new notification replaces the existing one in the notification drawer. * @param tag tag or {@code null} for none */ public AndroidNotification setTag(java.lang.String tag) { this.tag = tag; return this; } /** * Sets the "ticker" text, which is sent to accessibility services. Prior to API level 21 * (`Lollipop`), sets the text that is displayed in the status bar when the notification first * arrives. * @return value or {@code null} for none */ public java.lang.String getTicker() { return ticker; } /** * Sets the "ticker" text, which is sent to accessibility services. 
Prior to API level 21 * (`Lollipop`), sets the text that is displayed in the status bar when the notification first * arrives. * @param ticker ticker or {@code null} for none */ public AndroidNotification setTicker(java.lang.String ticker) { this.ticker = ticker; return this; } /** * The notification's title. If present, it will override * google.firebase.fcm.v1.Notification.title. * @return value or {@code null} for none */ public java.lang.String getTitle() { return title; } /** * The notification's title. If present, it will override * google.firebase.fcm.v1.Notification.title. * @param title title or {@code null} for none */ public AndroidNotification setTitle(java.lang.String title) { this.title = title; return this; } /** * Variable string values to be used in place of the format specifiers in title_loc_key to use to * localize the title text to the user's current localization. See [Formatting and * Styling](https://goo.gl/MalYE3) for more information. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getTitleLocArgs() { return titleLocArgs; } /** * Variable string values to be used in place of the format specifiers in title_loc_key to use to * localize the title text to the user's current localization. See [Formatting and * Styling](https://goo.gl/MalYE3) for more information. * @param titleLocArgs titleLocArgs or {@code null} for none */ public AndroidNotification setTitleLocArgs(java.util.List<java.lang.String> titleLocArgs) { this.titleLocArgs = titleLocArgs; return this; } /** * The key to the title string in the app's string resources to use to localize the title text to * the user's current localization. See [String Resources](https://goo.gl/NdFZGI) for more * information. * @return value or {@code null} for none */ public java.lang.String getTitleLocKey() { return titleLocKey; } /** * The key to the title string in the app's string resources to use to localize the title text to * the user's current localization. 
See [String Resources](https://goo.gl/NdFZGI) for more * information. * @param titleLocKey titleLocKey or {@code null} for none */ public AndroidNotification setTitleLocKey(java.lang.String titleLocKey) { this.titleLocKey = titleLocKey; return this; } /** * Set the vibration pattern to use. Pass in an array of * [protobuf.Duration](https://developers.google.com/protocol- * buffers/docs/reference/google.protobuf#google.protobuf.Duration) to turn on or off the * vibrator. The first value indicates the `Duration` to wait before turning the vibrator on. The * next value indicates the `Duration` to keep the vibrator on. Subsequent values alternate * between `Duration` to turn the vibrator off and to turn the vibrator on. If `vibrate_timings` * is set and `default_vibrate_timings` is set to `true`, the default value is used instead of the * user-specified `vibrate_timings`. * @return value or {@code null} for none */ public java.util.List<String> getVibrateTimings() { return vibrateTimings; } /** * Set the vibration pattern to use. Pass in an array of * [protobuf.Duration](https://developers.google.com/protocol- * buffers/docs/reference/google.protobuf#google.protobuf.Duration) to turn on or off the * vibrator. The first value indicates the `Duration` to wait before turning the vibrator on. The * next value indicates the `Duration` to keep the vibrator on. Subsequent values alternate * between `Duration` to turn the vibrator off and to turn the vibrator on. If `vibrate_timings` * is set and `default_vibrate_timings` is set to `true`, the default value is used instead of the * user-specified `vibrate_timings`. * @param vibrateTimings vibrateTimings or {@code null} for none */ public AndroidNotification setVibrateTimings(java.util.List<String> vibrateTimings) { this.vibrateTimings = vibrateTimings; return this; } /** * Set the [Notification.visibility](https://developer.android.com/reference/android/app/Notificat * ion.html#visibility) of the notification. 
* @return value or {@code null} for none */ public java.lang.String getVisibility() { return visibility; } /** * Set the [Notification.visibility](https://developer.android.com/reference/android/app/Notificat * ion.html#visibility) of the notification. * @param visibility visibility or {@code null} for none */ public AndroidNotification setVisibility(java.lang.String visibility) { this.visibility = visibility; return this; } @Override public AndroidNotification set(String fieldName, Object value) { return (AndroidNotification) super.set(fieldName, value); } @Override public AndroidNotification clone() { return (AndroidNotification) super.clone(); } }
googleapis/google-cloud-java
35,024
java-appengine-admin/proto-google-cloud-appengine-admin-v1/src/main/java/com/google/appengine/v1/ListInstancesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/appengine/v1/appengine.proto // Protobuf Java Version: 3.25.8 package com.google.appengine.v1; /** * * * <pre> * Response message for `Instances.ListInstances`. * </pre> * * Protobuf type {@code google.appengine.v1.ListInstancesResponse} */ public final class ListInstancesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.appengine.v1.ListInstancesResponse) ListInstancesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListInstancesResponse.newBuilder() to construct. 
private ListInstancesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListInstancesResponse() { instances_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListInstancesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.ListInstancesResponse.class, com.google.appengine.v1.ListInstancesResponse.Builder.class); } public static final int INSTANCES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.appengine.v1.Instance> instances_; /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public java.util.List<com.google.appengine.v1.Instance> getInstancesList() { return instances_; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.appengine.v1.InstanceOrBuilder> getInstancesOrBuilderList() { return instances_; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public int getInstancesCount() { return instances_.size(); } /** * * * <pre> * The instances belonging to the requested version. 
* </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public com.google.appengine.v1.Instance getInstances(int index) { return instances_.get(index); } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public com.google.appengine.v1.InstanceOrBuilder getInstancesOrBuilder(int index) { return instances_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < instances_.size(); i++) { output.writeMessage(1, instances_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < instances_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, instances_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.appengine.v1.ListInstancesResponse)) { return super.equals(obj); } com.google.appengine.v1.ListInstancesResponse other = (com.google.appengine.v1.ListInstancesResponse) obj; if (!getInstancesList().equals(other.getInstancesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getInstancesCount() > 0) { hash = (37 * hash) + INSTANCES_FIELD_NUMBER; hash = (53 * hash) + getInstancesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.appengine.v1.ListInstancesResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.appengine.v1.ListInstancesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.appengine.v1.ListInstancesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.appengine.v1.ListInstancesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for `Instances.ListInstances`. * </pre> * * Protobuf type {@code google.appengine.v1.ListInstancesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.appengine.v1.ListInstancesResponse) com.google.appengine.v1.ListInstancesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.ListInstancesResponse.class, com.google.appengine.v1.ListInstancesResponse.Builder.class); } // Construct using com.google.appengine.v1.ListInstancesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (instancesBuilder_ == null) { instances_ = java.util.Collections.emptyList(); } else { instances_ = null; instancesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_descriptor; } @java.lang.Override public com.google.appengine.v1.ListInstancesResponse 
getDefaultInstanceForType() { return com.google.appengine.v1.ListInstancesResponse.getDefaultInstance(); } @java.lang.Override public com.google.appengine.v1.ListInstancesResponse build() { com.google.appengine.v1.ListInstancesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.appengine.v1.ListInstancesResponse buildPartial() { com.google.appengine.v1.ListInstancesResponse result = new com.google.appengine.v1.ListInstancesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.appengine.v1.ListInstancesResponse result) { if (instancesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { instances_ = java.util.Collections.unmodifiableList(instances_); bitField0_ = (bitField0_ & ~0x00000001); } result.instances_ = instances_; } else { result.instances_ = instancesBuilder_.build(); } } private void buildPartial0(com.google.appengine.v1.ListInstancesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder 
addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.appengine.v1.ListInstancesResponse) { return mergeFrom((com.google.appengine.v1.ListInstancesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.appengine.v1.ListInstancesResponse other) { if (other == com.google.appengine.v1.ListInstancesResponse.getDefaultInstance()) return this; if (instancesBuilder_ == null) { if (!other.instances_.isEmpty()) { if (instances_.isEmpty()) { instances_ = other.instances_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureInstancesIsMutable(); instances_.addAll(other.instances_); } onChanged(); } } else { if (!other.instances_.isEmpty()) { if (instancesBuilder_.isEmpty()) { instancesBuilder_.dispose(); instancesBuilder_ = null; instances_ = other.instances_; bitField0_ = (bitField0_ & ~0x00000001); instancesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getInstancesFieldBuilder() : null; } else { instancesBuilder_.addAllMessages(other.instances_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.appengine.v1.Instance m = input.readMessage(com.google.appengine.v1.Instance.parser(), extensionRegistry); if (instancesBuilder_ == null) { ensureInstancesIsMutable(); instances_.add(m); } else { instancesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.appengine.v1.Instance> instances_ = java.util.Collections.emptyList(); private void ensureInstancesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { instances_ = new java.util.ArrayList<com.google.appengine.v1.Instance>(instances_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.appengine.v1.Instance, com.google.appengine.v1.Instance.Builder, com.google.appengine.v1.InstanceOrBuilder> instancesBuilder_; /** * * * <pre> * The instances 
belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public java.util.List<com.google.appengine.v1.Instance> getInstancesList() { if (instancesBuilder_ == null) { return java.util.Collections.unmodifiableList(instances_); } else { return instancesBuilder_.getMessageList(); } } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public int getInstancesCount() { if (instancesBuilder_ == null) { return instances_.size(); } else { return instancesBuilder_.getCount(); } } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public com.google.appengine.v1.Instance getInstances(int index) { if (instancesBuilder_ == null) { return instances_.get(index); } else { return instancesBuilder_.getMessage(index); } } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder setInstances(int index, com.google.appengine.v1.Instance value) { if (instancesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureInstancesIsMutable(); instances_.set(index, value); onChanged(); } else { instancesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder setInstances( int index, com.google.appengine.v1.Instance.Builder builderForValue) { if (instancesBuilder_ == null) { ensureInstancesIsMutable(); instances_.set(index, builderForValue.build()); onChanged(); } else { instancesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The instances belonging to the requested version. 
* </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder addInstances(com.google.appengine.v1.Instance value) { if (instancesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureInstancesIsMutable(); instances_.add(value); onChanged(); } else { instancesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder addInstances(int index, com.google.appengine.v1.Instance value) { if (instancesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureInstancesIsMutable(); instances_.add(index, value); onChanged(); } else { instancesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder addInstances(com.google.appengine.v1.Instance.Builder builderForValue) { if (instancesBuilder_ == null) { ensureInstancesIsMutable(); instances_.add(builderForValue.build()); onChanged(); } else { instancesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder addInstances( int index, com.google.appengine.v1.Instance.Builder builderForValue) { if (instancesBuilder_ == null) { ensureInstancesIsMutable(); instances_.add(index, builderForValue.build()); onChanged(); } else { instancesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder addAllInstances( java.lang.Iterable<? 
extends com.google.appengine.v1.Instance> values) { if (instancesBuilder_ == null) { ensureInstancesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, instances_); onChanged(); } else { instancesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder clearInstances() { if (instancesBuilder_ == null) { instances_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { instancesBuilder_.clear(); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public Builder removeInstances(int index) { if (instancesBuilder_ == null) { ensureInstancesIsMutable(); instances_.remove(index); onChanged(); } else { instancesBuilder_.remove(index); } return this; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public com.google.appengine.v1.Instance.Builder getInstancesBuilder(int index) { return getInstancesFieldBuilder().getBuilder(index); } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public com.google.appengine.v1.InstanceOrBuilder getInstancesOrBuilder(int index) { if (instancesBuilder_ == null) { return instances_.get(index); } else { return instancesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public java.util.List<? 
extends com.google.appengine.v1.InstanceOrBuilder> getInstancesOrBuilderList() { if (instancesBuilder_ != null) { return instancesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(instances_); } } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public com.google.appengine.v1.Instance.Builder addInstancesBuilder() { return getInstancesFieldBuilder() .addBuilder(com.google.appengine.v1.Instance.getDefaultInstance()); } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public com.google.appengine.v1.Instance.Builder addInstancesBuilder(int index) { return getInstancesFieldBuilder() .addBuilder(index, com.google.appengine.v1.Instance.getDefaultInstance()); } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ public java.util.List<com.google.appengine.v1.Instance.Builder> getInstancesBuilderList() { return getInstancesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.appengine.v1.Instance, com.google.appengine.v1.Instance.Builder, com.google.appengine.v1.InstanceOrBuilder> getInstancesFieldBuilder() { if (instancesBuilder_ == null) { instancesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.appengine.v1.Instance, com.google.appengine.v1.Instance.Builder, com.google.appengine.v1.InstanceOrBuilder>( instances_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); instances_ = null; } return instancesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.appengine.v1.ListInstancesResponse) } // @@protoc_insertion_point(class_scope:google.appengine.v1.ListInstancesResponse) private static final com.google.appengine.v1.ListInstancesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.appengine.v1.ListInstancesResponse(); } public static com.google.appengine.v1.ListInstancesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListInstancesResponse> PARSER = new com.google.protobuf.AbstractParser<ListInstancesResponse>() { @java.lang.Override public ListInstancesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListInstancesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListInstancesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.appengine.v1.ListInstancesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,139
java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/FailoverInstanceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1; /** * * * <pre> * Message for triggering failover on an Instance * </pre> * * Protobuf type {@code google.cloud.alloydb.v1.FailoverInstanceRequest} */ public final class FailoverInstanceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.FailoverInstanceRequest) FailoverInstanceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use FailoverInstanceRequest.newBuilder() to construct. 
private FailoverInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FailoverInstanceRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FailoverInstanceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_FailoverInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_FailoverInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.FailoverInstanceRequest.class, com.google.cloud.alloydb.v1.FailoverInstanceRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. 
* * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1.FailoverInstanceRequest)) { return super.equals(obj); } com.google.cloud.alloydb.v1.FailoverInstanceRequest other = (com.google.cloud.alloydb.v1.FailoverInstanceRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getValidateOnly() != other.getValidateOnly()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.alloydb.v1.FailoverInstanceRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for triggering failover on an Instance * </pre> * * Protobuf type {@code google.cloud.alloydb.v1.FailoverInstanceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.FailoverInstanceRequest) com.google.cloud.alloydb.v1.FailoverInstanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_FailoverInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_FailoverInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.FailoverInstanceRequest.class, com.google.cloud.alloydb.v1.FailoverInstanceRequest.Builder.class); } // Construct using com.google.cloud.alloydb.v1.FailoverInstanceRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_FailoverInstanceRequest_descriptor; } @java.lang.Override public 
com.google.cloud.alloydb.v1.FailoverInstanceRequest getDefaultInstanceForType() { return com.google.cloud.alloydb.v1.FailoverInstanceRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1.FailoverInstanceRequest build() { com.google.cloud.alloydb.v1.FailoverInstanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1.FailoverInstanceRequest buildPartial() { com.google.cloud.alloydb.v1.FailoverInstanceRequest result = new com.google.cloud.alloydb.v1.FailoverInstanceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.alloydb.v1.FailoverInstanceRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public 
Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1.FailoverInstanceRequest) { return mergeFrom((com.google.cloud.alloydb.v1.FailoverInstanceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1.FailoverInstanceRequest other) { if (other == com.google.cloud.alloydb.v1.FailoverInstanceRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. 
For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. 
An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). 
* </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. 
This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean validateOnly_; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.FailoverInstanceRequest) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.FailoverInstanceRequest) private static final com.google.cloud.alloydb.v1.FailoverInstanceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.FailoverInstanceRequest(); } public static com.google.cloud.alloydb.v1.FailoverInstanceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FailoverInstanceRequest> PARSER = new com.google.protobuf.AbstractParser<FailoverInstanceRequest>() { @java.lang.Override public FailoverInstanceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FailoverInstanceRequest> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<FailoverInstanceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1.FailoverInstanceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,139
java-functions/proto-google-cloud-functions-v2beta/src/main/java/com/google/cloud/functions/v2beta/GenerateUploadUrlResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/functions/v2beta/functions.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.functions.v2beta; /** * * * <pre> * Response of `GenerateSourceUploadUrl` method. * </pre> * * Protobuf type {@code google.cloud.functions.v2beta.GenerateUploadUrlResponse} */ public final class GenerateUploadUrlResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.functions.v2beta.GenerateUploadUrlResponse) GenerateUploadUrlResponseOrBuilder { private static final long serialVersionUID = 0L; // Use GenerateUploadUrlResponse.newBuilder() to construct. 
private GenerateUploadUrlResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GenerateUploadUrlResponse() { uploadUrl_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GenerateUploadUrlResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2beta.FunctionsProto .internal_static_google_cloud_functions_v2beta_GenerateUploadUrlResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2beta.FunctionsProto .internal_static_google_cloud_functions_v2beta_GenerateUploadUrlResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2beta.GenerateUploadUrlResponse.class, com.google.cloud.functions.v2beta.GenerateUploadUrlResponse.Builder.class); } private int bitField0_; public static final int UPLOAD_URL_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object uploadUrl_ = ""; /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return The uploadUrl. */ @java.lang.Override public java.lang.String getUploadUrl() { java.lang.Object ref = uploadUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uploadUrl_ = s; return s; } } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. 
* </pre> * * <code>string upload_url = 1;</code> * * @return The bytes for uploadUrl. */ @java.lang.Override public com.google.protobuf.ByteString getUploadUrlBytes() { java.lang.Object ref = uploadUrl_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STORAGE_SOURCE_FIELD_NUMBER = 2; private com.google.cloud.functions.v2beta.StorageSource storageSource_; /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> * * @return Whether the storageSource field is set. */ @java.lang.Override public boolean hasStorageSource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> * * @return The storageSource. */ @java.lang.Override public com.google.cloud.functions.v2beta.StorageSource getStorageSource() { return storageSource_ == null ? com.google.cloud.functions.v2beta.StorageSource.getDefaultInstance() : storageSource_; } /** * * * <pre> * The location of the source code in the upload bucket. 
* * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ @java.lang.Override public com.google.cloud.functions.v2beta.StorageSourceOrBuilder getStorageSourceOrBuilder() { return storageSource_ == null ? com.google.cloud.functions.v2beta.StorageSource.getDefaultInstance() : storageSource_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uploadUrl_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uploadUrl_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getStorageSource()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uploadUrl_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uploadUrl_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStorageSource()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.functions.v2beta.GenerateUploadUrlResponse)) { return super.equals(obj); } 
com.google.cloud.functions.v2beta.GenerateUploadUrlResponse other = (com.google.cloud.functions.v2beta.GenerateUploadUrlResponse) obj; if (!getUploadUrl().equals(other.getUploadUrl())) return false; if (hasStorageSource() != other.hasStorageSource()) return false; if (hasStorageSource()) { if (!getStorageSource().equals(other.getStorageSource())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + UPLOAD_URL_FIELD_NUMBER; hash = (53 * hash) + getUploadUrl().hashCode(); if (hasStorageSource()) { hash = (37 * hash) + STORAGE_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getStorageSource().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.functions.v2beta.GenerateUploadUrlResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response of `GenerateSourceUploadUrl` method. * </pre> * * Protobuf type {@code google.cloud.functions.v2beta.GenerateUploadUrlResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.functions.v2beta.GenerateUploadUrlResponse) com.google.cloud.functions.v2beta.GenerateUploadUrlResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2beta.FunctionsProto .internal_static_google_cloud_functions_v2beta_GenerateUploadUrlResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2beta.FunctionsProto .internal_static_google_cloud_functions_v2beta_GenerateUploadUrlResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2beta.GenerateUploadUrlResponse.class, com.google.cloud.functions.v2beta.GenerateUploadUrlResponse.Builder.class); } // Construct using com.google.cloud.functions.v2beta.GenerateUploadUrlResponse.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStorageSourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; uploadUrl_ = ""; storageSource_ = null; if (storageSourceBuilder_ != null) { storageSourceBuilder_.dispose(); storageSourceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.functions.v2beta.FunctionsProto .internal_static_google_cloud_functions_v2beta_GenerateUploadUrlResponse_descriptor; } @java.lang.Override public com.google.cloud.functions.v2beta.GenerateUploadUrlResponse getDefaultInstanceForType() { return com.google.cloud.functions.v2beta.GenerateUploadUrlResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.functions.v2beta.GenerateUploadUrlResponse build() { com.google.cloud.functions.v2beta.GenerateUploadUrlResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.functions.v2beta.GenerateUploadUrlResponse buildPartial() { com.google.cloud.functions.v2beta.GenerateUploadUrlResponse result = new com.google.cloud.functions.v2beta.GenerateUploadUrlResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.functions.v2beta.GenerateUploadUrlResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.uploadUrl_ = uploadUrl_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.storageSource_ = storageSourceBuilder_ == null ? 
storageSource_ : storageSourceBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.functions.v2beta.GenerateUploadUrlResponse) { return mergeFrom((com.google.cloud.functions.v2beta.GenerateUploadUrlResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.functions.v2beta.GenerateUploadUrlResponse other) { if (other == com.google.cloud.functions.v2beta.GenerateUploadUrlResponse.getDefaultInstance()) return this; if (!other.getUploadUrl().isEmpty()) { uploadUrl_ = other.uploadUrl_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasStorageSource()) { mergeStorageSource(other.getStorageSource()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { uploadUrl_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getStorageSourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object uploadUrl_ = ""; /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return The uploadUrl. */ public java.lang.String getUploadUrl() { java.lang.Object ref = uploadUrl_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uploadUrl_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return The bytes for uploadUrl. 
*/ public com.google.protobuf.ByteString getUploadUrlBytes() { java.lang.Object ref = uploadUrl_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @param value The uploadUrl to set. * @return This builder for chaining. */ public Builder setUploadUrl(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uploadUrl_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return This builder for chaining. */ public Builder clearUploadUrl() { uploadUrl_ = getDefaultInstance().getUploadUrl(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @param value The bytes for uploadUrl to set. * @return This builder for chaining. 
*/ public Builder setUploadUrlBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uploadUrl_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.functions.v2beta.StorageSource storageSource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.functions.v2beta.StorageSource, com.google.cloud.functions.v2beta.StorageSource.Builder, com.google.cloud.functions.v2beta.StorageSourceOrBuilder> storageSourceBuilder_; /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> * * @return Whether the storageSource field is set. */ public boolean hasStorageSource() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> * * @return The storageSource. */ public com.google.cloud.functions.v2beta.StorageSource getStorageSource() { if (storageSourceBuilder_ == null) { return storageSource_ == null ? 
com.google.cloud.functions.v2beta.StorageSource.getDefaultInstance() : storageSource_; } else { return storageSourceBuilder_.getMessage(); } } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ public Builder setStorageSource(com.google.cloud.functions.v2beta.StorageSource value) { if (storageSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } storageSource_ = value; } else { storageSourceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ public Builder setStorageSource( com.google.cloud.functions.v2beta.StorageSource.Builder builderForValue) { if (storageSourceBuilder_ == null) { storageSource_ = builderForValue.build(); } else { storageSourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. 
* * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ public Builder mergeStorageSource(com.google.cloud.functions.v2beta.StorageSource value) { if (storageSourceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && storageSource_ != null && storageSource_ != com.google.cloud.functions.v2beta.StorageSource.getDefaultInstance()) { getStorageSourceBuilder().mergeFrom(value); } else { storageSource_ = value; } } else { storageSourceBuilder_.mergeFrom(value); } if (storageSource_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ public Builder clearStorageSource() { bitField0_ = (bitField0_ & ~0x00000002); storageSource_ = null; if (storageSourceBuilder_ != null) { storageSourceBuilder_.dispose(); storageSourceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. 
* </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ public com.google.cloud.functions.v2beta.StorageSource.Builder getStorageSourceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getStorageSourceFieldBuilder().getBuilder(); } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ public com.google.cloud.functions.v2beta.StorageSourceOrBuilder getStorageSourceOrBuilder() { if (storageSourceBuilder_ != null) { return storageSourceBuilder_.getMessageOrBuilder(); } else { return storageSource_ == null ? com.google.cloud.functions.v2beta.StorageSource.getDefaultInstance() : storageSource_; } } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. 
* </pre> * * <code>.google.cloud.functions.v2beta.StorageSource storage_source = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.functions.v2beta.StorageSource, com.google.cloud.functions.v2beta.StorageSource.Builder, com.google.cloud.functions.v2beta.StorageSourceOrBuilder> getStorageSourceFieldBuilder() { if (storageSourceBuilder_ == null) { storageSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.functions.v2beta.StorageSource, com.google.cloud.functions.v2beta.StorageSource.Builder, com.google.cloud.functions.v2beta.StorageSourceOrBuilder>( getStorageSource(), getParentForChildren(), isClean()); storageSource_ = null; } return storageSourceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.functions.v2beta.GenerateUploadUrlResponse) } // @@protoc_insertion_point(class_scope:google.cloud.functions.v2beta.GenerateUploadUrlResponse) private static final com.google.cloud.functions.v2beta.GenerateUploadUrlResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.functions.v2beta.GenerateUploadUrlResponse(); } public static com.google.cloud.functions.v2beta.GenerateUploadUrlResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GenerateUploadUrlResponse> PARSER = new com.google.protobuf.AbstractParser<GenerateUploadUrlResponse>() { @java.lang.Override public GenerateUploadUrlResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GenerateUploadUrlResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GenerateUploadUrlResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.functions.v2beta.GenerateUploadUrlResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/incubator-retired-wave
35,216
wave/src/main/java/org/waveprotocol/wave/concurrencycontrol/channel/OperationChannelMultiplexerImpl.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.concurrencycontrol.channel; import static org.waveprotocol.wave.model.wave.Constants.NO_VERSION; import org.waveprotocol.wave.common.logging.LoggerBundle; import org.waveprotocol.wave.concurrencycontrol.client.ConcurrencyControl; import org.waveprotocol.wave.concurrencycontrol.common.ChannelException; import org.waveprotocol.wave.concurrencycontrol.common.CorruptionDetail; import org.waveprotocol.wave.concurrencycontrol.common.Recoverable; import org.waveprotocol.wave.concurrencycontrol.common.ResponseCode; import org.waveprotocol.wave.concurrencycontrol.common.UnsavedDataListenerFactory; import org.waveprotocol.wave.model.id.IdFilter; import org.waveprotocol.wave.model.id.WaveId; import org.waveprotocol.wave.model.id.WaveletId; import org.waveprotocol.wave.model.id.WaveletName; import org.waveprotocol.wave.model.operation.wave.TransformedWaveletDelta; import org.waveprotocol.wave.model.operation.wave.WaveletDelta; import org.waveprotocol.wave.model.operation.wave.WaveletOperation; import org.waveprotocol.wave.model.util.CollectionUtils; import org.waveprotocol.wave.model.util.FuzzingBackOffScheduler; import org.waveprotocol.wave.model.util.Preconditions; import 
org.waveprotocol.wave.model.util.Scheduler; import org.waveprotocol.wave.model.version.HashedVersion; import org.waveprotocol.wave.model.version.HashedVersionFactory; import org.waveprotocol.wave.model.wave.ParticipantId; import org.waveprotocol.wave.model.wave.data.ObservableWaveletData; import org.waveprotocol.wave.model.wave.data.impl.EmptyWaveletSnapshot; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * Multiplexes several {@link OperationChannel operation channels} over one * {@link ViewChannel view channel}. * * * |- OperationChannelMultiplexer -----------------------------------------| * | | * | |-Stacklet---------------------------------| | * | | OperationChannel <-> WaveletDeltaChannel |-| | * <-> | |------------------------------------------| |-| <=> View Channel | <-> WaveService * | |------------------------------------------| | | * | |------------------------------------------| | * | | * | All exceptions are directed here | * |-----------------------------------------------------------------------| * * Note: * * All exceptions that are emitted from using the OperationChannel or * OperationChannelMultiplexer interfaces are caught in this class. * i.e. when the client calls methods from the left part of the diagram. * * All exceptions generated as a result of handling server messages in ViewChannel * are routed here through onException(). i.e. when the WaveService calls methods on * the right part of the diagram through call backs. * * This class is responsible for reporting all the exceptions to the user. * */ public class OperationChannelMultiplexerImpl implements OperationChannelMultiplexer { /** * Binds together both ends of a delta channel. */ interface MultiplexedDeltaChannel extends WaveletDeltaChannel, WaveletChannel.Listener { } /** * Factory for creating delta channels. 
*/ interface DeltaChannelFactory { /** * Creates a delta channel. * * @param waveletChannel channel through which the delta channel * communicates */ MultiplexedDeltaChannel create(WaveletChannel waveletChannel); } /** * Factory for operation channels. */ interface OperationChannelFactory { /** * Creates an operation channel. * * @param deltaChannel channel through which the op channel communicates * @param waveletId wavelet id for the new operation channel * @param startVersion the version to start from * @param accessibility accessibility of the new channel * @return a new operation channel. */ InternalOperationChannel create(WaveletDeltaChannel deltaChannel, WaveletId waveletId, HashedVersion startVersion, Accessibility accessibility); } /** * A per-wavelet stack above this multiplexer. A stacklet forwards message * from the server to a listener at the bottom of the stacklet (a delta * channel). When communications fail a stacklet fetches reconnection version * from the contained operation channel. */ private static class Stacklet implements WaveletChannel.Listener { private final MultiplexedDeltaChannel deltaChannel; private final InternalOperationChannel opChannel; private boolean firstMessageReceived; private boolean dropAdditionalSnapshot; /** * Creates a stacklet. * * @param deltaChannel delta channel at the bottom of the stacklet * @param opChannel operation channel at the top of the stacklet * @param dropSnapshot whether to expect and drop an additional snapshot * after the first message. 
*/ private Stacklet(MultiplexedDeltaChannel deltaChannel, InternalOperationChannel opChannel, boolean dropSnapshot) { this.deltaChannel = deltaChannel; this.opChannel = opChannel; this.firstMessageReceived = false; this.dropAdditionalSnapshot = dropSnapshot; } public void onWaveletSnapshot(ObservableWaveletData wavelet, HashedVersion lastCommittedVersion, HashedVersion currentVersion) throws ChannelException { // When a channel is created locally we fake an initial empty // snapshot. The server still sends one when it creates the wavelet // though, so it's dropped it here if that's expected. // See createOperationChannel(). if (!firstMessageReceived) { firstMessageReceived = true; } else if (dropAdditionalSnapshot) { // TODO(anorth): check the snapshot is as expected, even though // it's dropped. dropAdditionalSnapshot = false; return; } deltaChannel.onWaveletSnapshot(wavelet, lastCommittedVersion, currentVersion); } @Override public void onWaveletUpdate(List<TransformedWaveletDelta> deltas, HashedVersion lastCommittedVersion, HashedVersion currentVersion) throws ChannelException { if (!firstMessageReceived) { firstMessageReceived = true; } deltaChannel.onWaveletUpdate(deltas, lastCommittedVersion, currentVersion); } /** * Resets this stacklet ready for reconnection. */ public void reset() { deltaChannel.reset(opChannel); opChannel.reset(); } /** * Closes this stacklet permanently. */ public void close() { deltaChannel.reset(null); opChannel.close(); } public OperationChannel getOperationChannel() { return opChannel; } public boolean isExpectingSnapshot() { return dropAdditionalSnapshot; } } /** * Holder class for the copious number of loggers. 
*/ public static class LoggerContext { public final LoggerBundle ops; public final LoggerBundle delta; public final LoggerBundle cc; public final LoggerBundle view; public LoggerContext(LoggerBundle ops, LoggerBundle delta, LoggerBundle cc, LoggerBundle view) { this.ops = ops; this.delta = delta; this.cc = cc; this.view = view; } } /** Multiplexer state. */ private static enum State { NOT_CONNECTED, CONNECTED, RECONNECTING } /** Wave id for channels in this mux. */ private final WaveId waveId; /** Multiplexed channels, indexed by wavelet id. */ private final Map<WaveletId, Stacklet> channels = CollectionUtils.newHashMap(); /** Factory for creating delta channels. */ private final DeltaChannelFactory deltaChannelFactory; /** Factory for creating operation-channel stacks on top of wave services. */ private final OperationChannelFactory opChannelFactory; /** Factory for creating a view channel */ private final ViewChannelFactory viewFactory; /** Logger. */ private final LoggerBundle logger; /** A stateful manager/factory for unsaved data listeners */ private final UnsavedDataListenerFactory unsavedDataListenerFactory; /** Synthesizer of initial wavelet snapshots for locally-created wavelets. */ private final ObservableWaveletData.Factory<?> dataFactory; /** Produces hashed versions. */ private final HashedVersionFactory hashFactory; /** List of commands to run when the underlying view becomes connected. */ private final List<Runnable> onConnected = CollectionUtils.newArrayList(); // // Mutable state. // /** Connection state of the mux. */ private State state; /** Whether the initial open of the mux has finished. */ private boolean openFinished = false; /** * Underlying multiplexed view channel; created on reconnection, set null on * close. */ private ViewChannel viewChannel; /** * Tag identifying which view connection is current. Changes on each * reconnection. */ private int connectionTag = 0; /** Filter specifying wavelets to open. 
*/ private IdFilter waveletFilter; /** Listener for handling new operation channels. */ private Listener muxListener; /** Used to backoff when reconnecting. */ private final Scheduler scheduler; /** * Creates factory for building delta channels. * * @param logger logger to use for created channels */ private static DeltaChannelFactory createDeltaChannelFactory(final LoggerBundle logger) { return new DeltaChannelFactory() { @Override public MultiplexedDeltaChannel create(WaveletChannel waveletChannel) { return new WaveletDeltaChannelImpl(waveletChannel, logger); } }; } /** * Creates a factory for building operation channels on a wave. * * @param waveId wave id * @param unsavedDataListenerFactory factory for unsaved data listeners * @param loggers logger bundle * @return a new operation channel factory */ private static OperationChannelFactory createOperationChannelFactory(final WaveId waveId, final UnsavedDataListenerFactory unsavedDataListenerFactory, final LoggerContext loggers) { return new OperationChannelFactory() { @Override public InternalOperationChannel create(WaveletDeltaChannel deltaChannel, WaveletId waveletId, HashedVersion startVersion, Accessibility accessibility) { ConcurrencyControl cc = new ConcurrencyControl(loggers.cc, startVersion); if (unsavedDataListenerFactory != null) { cc.setUnsavedDataListener(unsavedDataListenerFactory.create(waveletId)); } return new OperationChannelImpl(loggers.ops, deltaChannel, cc, accessibility); } }; } /** * Creates a multiplexer. * * WARNING: the scheduler should provide back-off. Providing a scheduler which * executes immediately or does not back off may cause denial-of-service-like * reconnection attempts against the servers. Use something like * {@link FuzzingBackOffScheduler}. 
* * @param waveId wave id to open * @param viewFactory factory for opening view channels * @param dataFactory factory for making snapshots of empty wavelets * @param loggers log targets * @param unsavedDataListenerFactory a factory for adding listeners * @param scheduler scheduler for reconnection * @param hashFactory factory for hashed versions */ public OperationChannelMultiplexerImpl(WaveId waveId, ViewChannelFactory viewFactory, ObservableWaveletData.Factory<?> dataFactory, LoggerContext loggers, UnsavedDataListenerFactory unsavedDataListenerFactory, Scheduler scheduler, HashedVersionFactory hashFactory) { // Construct default dependency implementations, based on given arguments. this(waveId, createDeltaChannelFactory(loggers.delta), createOperationChannelFactory(waveId, unsavedDataListenerFactory, loggers), viewFactory, dataFactory, scheduler, loggers.view, unsavedDataListenerFactory, hashFactory); Preconditions.checkNotNull(dataFactory, "null dataFactory"); } /** * Creates a multiplexer (direct dependency arguments only). Exposed as * package-private for testing. * * @param opChannelFactory factory for creating operation-channel stacks * @param channelFactory factory for creating the underlying view channel * @param dataFactory factory for creating wavelet snapshots * @param scheduler used to back off when reconnecting. assumed not null. 
* @param logger log target * @param unsavedDataListenerFactory * @param hashFactory factory for hashed versions */ OperationChannelMultiplexerImpl( WaveId waveId, DeltaChannelFactory deltaChannelFactory, OperationChannelFactory opChannelFactory, ViewChannelFactory channelFactory, ObservableWaveletData.Factory<?> dataFactory, Scheduler scheduler, LoggerBundle logger, UnsavedDataListenerFactory unsavedDataListenerFactory, HashedVersionFactory hashFactory) { this.waveId = waveId; this.deltaChannelFactory = deltaChannelFactory; this.opChannelFactory = opChannelFactory; this.viewFactory = channelFactory; this.dataFactory = dataFactory; this.logger = logger; this.unsavedDataListenerFactory = unsavedDataListenerFactory; this.state = State.NOT_CONNECTED; this.scheduler = scheduler; this.hashFactory = hashFactory; } @Override public void open(Listener listener, IdFilter waveletFilter, Collection<KnownWavelet> knownWavelets) { this.muxListener = listener; this.waveletFilter = waveletFilter; try { if (!knownWavelets.isEmpty()) { for (KnownWavelet knownWavelet : knownWavelets) { Preconditions.checkNotNull(knownWavelet.snapshot, "Snapshot has no wavelet"); Preconditions.checkNotNull(knownWavelet.committedVersion, "Known wavelet has null committed version"); boolean dropAdditionalSnapshot = false; addOperationChannel(knownWavelet.snapshot.getWaveletId(), knownWavelet.snapshot, knownWavelet.committedVersion, knownWavelet.accessibility, dropAdditionalSnapshot); } // consider the wave as if open has finished. 
maybeOpenFinished(); } Map<WaveletId, List<HashedVersion>> knownSignatures = signaturesFromWavelets(knownWavelets); connect(knownSignatures); } catch (ChannelException e) { shutdown("Multiplexer open failed.", e); } } @Override public void open(Listener listener, IdFilter waveletFilter) { open(listener, waveletFilter, Collections.<KnownWavelet>emptyList()); } @Override public void close() { shutdown(ResponseCode.OK, "View closed.", null); } @Override public void createOperationChannel(WaveletId waveletId, ParticipantId creator) { if (channels.containsKey(waveletId)) { Preconditions.illegalArgument("Operation channel already exists for: " + waveletId); } // Create the new channel, and fake an initial snapshot. // TODO(anorth): inject a clock for providing timestamps. HashedVersion v0 = hashFactory.createVersionZero(WaveletName.of(waveId, waveletId)); final ObservableWaveletData emptySnapshot = dataFactory.create( new EmptyWaveletSnapshot(waveId, waveletId, creator, v0, System.currentTimeMillis())); try { boolean dropAdditionalSnapshot = true; addOperationChannel(waveletId, emptySnapshot, v0, Accessibility.READ_WRITE, dropAdditionalSnapshot); } catch (ChannelException e) { shutdown("Creating operation channel failed.", e); } } /** * Creates a view channel listener. The listener will forward messages to * stacklets while {@link #connectionTag} has the value it had at creation * time. When a channel (re)connects the tag changes. * * @param expectedWavelets wavelets and reconnection versions we expect to * receive a message for before * {@link ViewChannel.Listener#onOpenFinished()} */ private ViewChannel.Listener createViewListener( final Map<WaveletId, List<HashedVersion>> expectedWavelets) { final int expectedTag = connectionTag; return new ViewChannel.Listener() { /** * Wavelets for which we have not yet seen a message, or null after * onOpenFinished. 
*/ Set<WaveletId> missingWavelets = CollectionUtils.newHashSet(expectedWavelets.keySet()); @Override public void onSnapshot(WaveletId waveletId, ObservableWaveletData wavelet, HashedVersion lastCommittedVersion, HashedVersion currentVersion) throws ChannelException { if (connectionTag == expectedTag) { removeMissingWavelet(waveletId); try { // Forward message to the appropriate stacklet, creating it if // needed. Stacklet stacklet = channels.get(waveletId); boolean dropAdditionalSnapshot = false; // TODO(anorth): Do better than guessing at accessibility here. if (stacklet == null) { createStacklet(waveletId, wavelet, Accessibility.READ_WRITE, dropAdditionalSnapshot); stacklet = channels.get(waveletId); } else if (!stacklet.isExpectingSnapshot()) { // Replace the existing stacklet by first removing the wavelet // and then adding the newly connected one. channels.remove(waveletId); unsavedDataListenerFactory.destroy(waveletId); muxListener.onOperationChannelRemoved(stacklet.getOperationChannel(), waveletId); createStacklet(waveletId, wavelet, Accessibility.READ_WRITE, dropAdditionalSnapshot); stacklet = channels.get(waveletId); } stacklet.onWaveletSnapshot(wavelet, lastCommittedVersion, currentVersion); } catch (ChannelException e) { throw exceptionWithContext(e, waveletId); } } } @Override public void onUpdate(WaveletId waveletId, List<TransformedWaveletDelta> deltas, HashedVersion lastCommittedVersion, HashedVersion currentVersion) throws ChannelException { if (connectionTag == expectedTag) { removeMissingWavelet(waveletId); maybeResetScheduler(deltas); try { Stacklet stacklet = channels.get(waveletId); if (stacklet == null) { //TODO(user): Figure out the right exception to throw here. 
throw new IllegalStateException("Received deltas with no stacklet present!"); } stacklet.onWaveletUpdate(deltas, lastCommittedVersion, currentVersion); } catch (ChannelException e) { throw exceptionWithContext(e, waveletId); } } else { logger.trace().log("Mux dropping update from defunct view"); } } @Override public void onOpenFinished() throws ChannelException { if (connectionTag == expectedTag) { if (missingWavelets == null) { // TODO(anorth): Add an error code for a protocol error and use // it here. throw new ChannelException(ResponseCode.INTERNAL_ERROR, "Multiplexer received openFinished twice", null, Recoverable.NOT_RECOVERABLE, waveId, null); } // If a missing wavelet could be reconnected at version zero then // fake the resync message here. The server no longer knows about // the wavelet so we should resubmit changes from version zero. Iterator<WaveletId> itr = missingWavelets.iterator(); while (itr.hasNext()) { WaveletId maybeMissing = itr.next(); List<HashedVersion> resyncVersions = expectedWavelets.get(maybeMissing); Preconditions.checkState(!resyncVersions.isEmpty(), "Empty resync versions for wavelet " + maybeMissing); if (resyncVersions.get(0).getVersion() == 0) { Stacklet stacklet = channels.get(maybeMissing); if (stacklet == null) { Preconditions.illegalState("Resync wavelet has no stacklet. Channels: " + channels.keySet() + ", resync: " + expectedWavelets.keySet()); } WaveletName wavelet = WaveletName.of(waveId, maybeMissing); List<TransformedWaveletDelta> resyncDeltaList = createVersionZeroResync(wavelet); HashedVersion v0 = hashFactory.createVersionZero(wavelet); stacklet.onWaveletUpdate(resyncDeltaList, v0, v0); itr.remove(); } } // Check we received a message for each expected wavelet. 
if (!missingWavelets.isEmpty()) { throw new ChannelException(ResponseCode.NOT_AUTHORIZED, "Server didn't acknowledge known wavelets; perhaps access has been lost: " + missingWavelets, null, Recoverable.NOT_RECOVERABLE, waveId, null); } missingWavelets = null; maybeOpenFinished(); } else { logger.trace().log("Mux dropping openFinished from defunct view"); } } @Override public void onConnected() { if (connectionTag == expectedTag) { OperationChannelMultiplexerImpl.this.onConnected(); } else { logger.trace().log("Mux dropping onConnected from defunct view"); } } @Override public void onClosed() { if (connectionTag == expectedTag) { reconnect(null); } else { logger.trace().log("Mux dropping onClosed from defunct view"); } } @Override public void onException(ChannelException e) { if (connectionTag == expectedTag) { onChannelException(e); } else { logger.trace().log("Mux dropping failure from defunct view"); } } /** * Adds a wavelet id to the set of seen ids if they are being tracked. */ private void removeMissingWavelet(WaveletId id) { if (missingWavelets != null) { missingWavelets.remove(id); } } /** * Resets the reconnection scheduler if a message indicates * the connection is somewhat ok. */ private void maybeResetScheduler(List<TransformedWaveletDelta> deltas) { // The connection is probably ok if we receive a delta. A snapshot // is not sufficient since some are locally generated. The delta need // not have ops; a reconnection delta is enough. if ((deltas.size() > 0)) { scheduler.reset(); } } }; } /** * Creates a stacklet and (optionally) initialises it with a snapshot. 
* * @param waveletId the wavelet id of the channel to create * @param snapshot the wavelet container for the new channel * @param committedVersion the committed version for the new channel * @param accessibility accessibility the user currently has to the wavelet * @param initialiseLocalChannel whether to send the snapshot through the * stacklet, in which case it should expect and drop an additional * snapshot from the network */ private void addOperationChannel(final WaveletId waveletId, ObservableWaveletData snapshot, HashedVersion committedVersion, Accessibility accessibility, boolean initialiseLocalChannel) throws ChannelException { final Stacklet stacklet = createStacklet(waveletId, snapshot, accessibility, initialiseLocalChannel); if (initialiseLocalChannel) { final HashedVersion currentVersion = snapshot.getHashedVersion(); initialiseLocallyCreatedStacklet(stacklet, waveletId, snapshot, committedVersion, currentVersion); } } /** * This is an ugly work-around the lack of ability to add channels to a view * in the view service API. We need to send some message through the stacklet * so it's connected but the server can't send us any message until we submit * the first delta, which requires a connected stacklet... */ private void initialiseLocallyCreatedStacklet(final Stacklet stacklet, final WaveletId waveletId, final ObservableWaveletData snapshot, final HashedVersion committedVersion, final HashedVersion currentVersion) throws ChannelException { if (state == State.CONNECTED) { try { stacklet.onWaveletSnapshot(snapshot, committedVersion, currentVersion); } catch (ChannelException e) { throw exceptionWithContext(e, waveletId); } } else { // Delay connecting the stacklet until the underlying view is connected. 
onConnected.add(new Runnable() { public void run() { try { stacklet.onWaveletSnapshot(snapshot, committedVersion, currentVersion); } catch (ChannelException e) { shutdown("Fake snapshot for wavelet channel " + waveId + "/" + waveletId + "failed", exceptionWithContext(e, waveletId)); } } }); } } /** * Adds a new operation-channel stacklet to this multiplexer and notifies the * listener of the new channel's creation. * * @param waveletId id of the concurrency domain for the new channel * @param snapshot wavelet initial state snapshot * @param accessibility accessibility of the stacklet; if not * {@link Accessibility#READ_WRITE} then * the stacklet will fail on send * @param dropSnapshot whether to expect and drop an additional snapshot from * the view */ private Stacklet createStacklet(final WaveletId waveletId, ObservableWaveletData snapshot, Accessibility accessibility, boolean dropSnapshot) { if (channels.containsKey(waveletId)) { Preconditions.illegalArgument("Cannot create duplicate channel for wavelet: " + waveId + "/" + waveletId); } WaveletChannel waveletChannel = createWaveletChannel(waveletId); MultiplexedDeltaChannel deltaChannel = deltaChannelFactory.create(waveletChannel); InternalOperationChannel opChannel = opChannelFactory.create(deltaChannel, waveletId, snapshot.getHashedVersion(), accessibility); Stacklet stacklet = new Stacklet(deltaChannel, opChannel, dropSnapshot); stacklet.reset(); channels.put(waveletId, stacklet); if (muxListener != null) { muxListener.onOperationChannelCreated(stacklet.getOperationChannel(), snapshot, accessibility); } return stacklet; } /** * Executes any pending commands in the {@link #onConnected} queue. */ private void onConnected() { state = State.CONNECTED; // Connect all channels created before now. for (Runnable command : onConnected) { command.run(); } onConnected.clear(); } /** * Handles failure of the view channel or an operation channel. * * @param e The exception that caused the channel to fail. 
*/ private void onChannelException(ChannelException e) { if (e.getRecoverable() != Recoverable.RECOVERABLE) { shutdown(e.getResponseCode(), "Channel Exception", e); } else { reconnect(e); } } private void connect(Map<WaveletId, List<HashedVersion>> knownWavelets) { Preconditions.checkState(state != State.CONNECTED, "Cannot connect already-connected channel"); checkConnectVersions(knownWavelets); logger.trace().log("Multiplexer reconnecting wave " + waveId); viewChannel = viewFactory.create(waveId); viewChannel.open(createViewListener(knownWavelets), waveletFilter, knownWavelets); } /** * Checks that reconnect versions are strictly increasing and removes any * that are not accepted by the connection's wavelet filter. */ private void checkConnectVersions(Map<WaveletId, List<HashedVersion>> knownWavelets) { Iterator<Map.Entry<WaveletId, List<HashedVersion>>> itr = knownWavelets.entrySet().iterator(); while (itr.hasNext()) { Map.Entry<WaveletId, List<HashedVersion>> entry = itr.next(); WaveletId id = entry.getKey(); if (IdFilter.accepts(waveletFilter, id)) { long prevVersion = NO_VERSION; for (HashedVersion v : entry.getValue()) { if ((prevVersion != NO_VERSION) && (v.getVersion() <= prevVersion)) { throw new IllegalArgumentException("Invalid reconnect versions for " + waveId + id + ": " + entry.getValue()); } prevVersion = v.getVersion(); } } else { // TODO(anorth): throw an IllegalArgumentException here after fixing // all callers to avoid this. logger.error().log( "Mux for " + waveId + " dropping resync versions for filtered wavelet " + id + ", filter " + waveletFilter); itr.remove(); } } } /** * Terminates all stacklets then reconnects with the known versions * provided by them. 
* @param exception The exception that caused the reconnection */ private void reconnect(ChannelException exception) { logger.trace().logLazyObjects("Multiplexer disconnected in state ", state , ", reconnecting."); state = State.RECONNECTING; // NOTE(zdwang): don't clear this as we'll lose wavelets if we've never // been connected. This is a reminder. // onConnected.clear(); // Reset each stacklet, collecting the reconnect versions. final Map<WaveletId, List<HashedVersion>> knownWavelets = CollectionUtils.newHashMap(); for (final WaveletId wavelet : channels.keySet()) { final Stacklet stacklet = channels.get(wavelet); stacklet.reset(); knownWavelets.put(wavelet, stacklet.getOperationChannel().getReconnectVersions()); } // Close the view channel and ignore future messages from it. connectionTag++; viewChannel.close(); // Run the connect part in the scheduler scheduler.schedule(new Scheduler.Command() { int tag = connectionTag; @Override public void execute() { if (tag == connectionTag) { // Reconnect by creating another view channel. connect(knownWavelets); } } }); } /** * Shuts down this multiplexer permanently. * * @param reasonCode code representing failure reason. If the value is not * {@code ResponseCode.OK} then the listener will be notified of connection failure. * @param description reason for failure * @param exception any exception that caused the shutdown. */ private void shutdown(ResponseCode reasonCode, String description, Throwable exception) { if (description == null) { description = "(No error description provided)"; } boolean notifyFailure = (reasonCode != ResponseCode.OK); // We are telling the user through UI that the wave is corrupt, so we must also report it // to the server. if (notifyFailure) { if (exception == null) { logger.error().log(description); } else { logger.error().log(description, exception); } } if (viewChannel != null) { // Ignore future messages. 
connectionTag++; state = State.NOT_CONNECTED; for (Stacklet stacklet : channels.values()) { stacklet.close(); } channels.clear(); viewChannel.close(); viewChannel = null; if (muxListener != null && notifyFailure) { muxListener.onFailed(new CorruptionDetail(reasonCode, description, exception)); } muxListener = null; } } /** * Shuts down this multiplexer permanently after an exception. */ private void shutdown(String message, ChannelException e) { shutdown(e.getResponseCode(), message, e); } /** * Creates a wavelet channel for submissions against a wavelet. * * @param waveletId wavelet id for the channel */ private WaveletChannel createWaveletChannel(final WaveletId waveletId) { return new WaveletChannel() { @Override public void submit(WaveletDelta delta, final SubmitCallback callback) { viewChannel.submitDelta(waveletId, delta, callback); } @Override public String debugGetProfilingInfo() { return viewChannel.debugGetProfilingInfo(waveletId); } }; } private void maybeOpenFinished() { // Forward message to the mux's open listener. if (!openFinished) { openFinished = true; muxListener.onOpenFinished(); } } /** * Wraps a channel exception in another providing wave and wavelet id context. */ private ChannelException exceptionWithContext(ChannelException e, WaveletId waveletId) { return new ChannelException(e.getResponseCode(), "Nested ChannelException", e, e.getRecoverable(), waveId, waveletId); } /** * Constructs a maps of list of wavelet signatures from a collection of * wavelet snapshots. * * Package-private for testing. 
*/ static Map<WaveletId, List<HashedVersion>> signaturesFromWavelets( Collection<KnownWavelet> knownWavelets) { Map<WaveletId, List<HashedVersion>> signatures = new HashMap<WaveletId, List<HashedVersion>>(); for (KnownWavelet knownWavelet : knownWavelets) { if (knownWavelet.accessibility.isReadable()) { ObservableWaveletData snapshot = knownWavelet.snapshot; WaveletId waveletId = snapshot.getWaveletId(); List<HashedVersion> sigs = Collections.singletonList(snapshot.getHashedVersion()); signatures.put(waveletId, sigs); } } return signatures; } /** * Creates a container message mimicking a resync message for a wavelet at * version zero. */ private List<TransformedWaveletDelta> createVersionZeroResync(WaveletName wavelet) { return Collections.singletonList(new TransformedWaveletDelta((ParticipantId) null, hashFactory.createVersionZero(wavelet), 0L, Collections.<WaveletOperation> emptyList())); } }
googleapis/google-cloud-java
35,041
java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/SetConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1/datacatalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1; /** * * * <pre> * Request message for * [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.SetConfigRequest} */ public final class SetConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.SetConfigRequest) SetConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SetConfigRequest.newBuilder() to construct. 
private SetConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SetConfigRequest() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SetConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.Datacatalog .internal_static_google_cloud_datacatalog_v1_SetConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.Datacatalog .internal_static_google_cloud_datacatalog_v1_SetConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.SetConfigRequest.class, com.google.cloud.datacatalog.v1.SetConfigRequest.Builder.class); } private int configurationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object configuration_; public enum ConfigurationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { TAG_TEMPLATE_MIGRATION(2), CATALOG_UI_EXPERIENCE(3), CONFIGURATION_NOT_SET(0); private final int value; private ConfigurationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static ConfigurationCase valueOf(int value) { return forNumber(value); } public static ConfigurationCase forNumber(int value) { switch (value) { case 2: return TAG_TEMPLATE_MIGRATION; case 3: return CATALOG_UI_EXPERIENCE; case 0: return CONFIGURATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public ConfigurationCase getConfigurationCase() { return ConfigurationCase.forNumber(configurationCase_); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The organization or project whose config is being specified. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The organization or project whose config is being specified. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TAG_TEMPLATE_MIGRATION_FIELD_NUMBER = 2; /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @return Whether the tagTemplateMigration field is set. 
*/ public boolean hasTagTemplateMigration() { return configurationCase_ == 2; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @return The enum numeric value on the wire for tagTemplateMigration. */ public int getTagTemplateMigrationValue() { if (configurationCase_ == 2) { return (java.lang.Integer) configuration_; } return 0; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @return The tagTemplateMigration. */ public com.google.cloud.datacatalog.v1.TagTemplateMigration getTagTemplateMigration() { if (configurationCase_ == 2) { com.google.cloud.datacatalog.v1.TagTemplateMigration result = com.google.cloud.datacatalog.v1.TagTemplateMigration.forNumber( (java.lang.Integer) configuration_); return result == null ? com.google.cloud.datacatalog.v1.TagTemplateMigration.UNRECOGNIZED : result; } return com.google.cloud.datacatalog.v1.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_UNSPECIFIED; } public static final int CATALOG_UI_EXPERIENCE_FIELD_NUMBER = 3; /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @return Whether the catalogUiExperience field is set. */ public boolean hasCatalogUiExperience() { return configurationCase_ == 3; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @return The enum numeric value on the wire for catalogUiExperience. */ public int getCatalogUiExperienceValue() { if (configurationCase_ == 3) { return (java.lang.Integer) configuration_; } return 0; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. 
* </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @return The catalogUiExperience. */ public com.google.cloud.datacatalog.v1.CatalogUIExperience getCatalogUiExperience() { if (configurationCase_ == 3) { com.google.cloud.datacatalog.v1.CatalogUIExperience result = com.google.cloud.datacatalog.v1.CatalogUIExperience.forNumber( (java.lang.Integer) configuration_); return result == null ? com.google.cloud.datacatalog.v1.CatalogUIExperience.UNRECOGNIZED : result; } return com.google.cloud.datacatalog.v1.CatalogUIExperience.CATALOG_UI_EXPERIENCE_UNSPECIFIED; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (configurationCase_ == 2) { output.writeEnum(2, ((java.lang.Integer) configuration_)); } if (configurationCase_ == 3) { output.writeEnum(3, ((java.lang.Integer) configuration_)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (configurationCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeEnumSize( 2, ((java.lang.Integer) configuration_)); } if (configurationCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeEnumSize( 3, ((java.lang.Integer) configuration_)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1.SetConfigRequest)) { return super.equals(obj); } com.google.cloud.datacatalog.v1.SetConfigRequest other = (com.google.cloud.datacatalog.v1.SetConfigRequest) obj; if (!getName().equals(other.getName())) return false; if (!getConfigurationCase().equals(other.getConfigurationCase())) return false; switch (configurationCase_) { case 2: if (getTagTemplateMigrationValue() != other.getTagTemplateMigrationValue()) return false; break; case 3: if (getCatalogUiExperienceValue() != other.getCatalogUiExperienceValue()) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); switch (configurationCase_) { case 2: hash = (37 * hash) + TAG_TEMPLATE_MIGRATION_FIELD_NUMBER; hash = (53 * hash) + getTagTemplateMigrationValue(); break; case 3: hash = (37 * hash) + CATALOG_UI_EXPERIENCE_FIELD_NUMBER; hash = (53 * hash) + getCatalogUiExperienceValue(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.SetConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.datacatalog.v1.SetConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig]. 
* </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.SetConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.SetConfigRequest) com.google.cloud.datacatalog.v1.SetConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.Datacatalog .internal_static_google_cloud_datacatalog_v1_SetConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.Datacatalog .internal_static_google_cloud_datacatalog_v1_SetConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.SetConfigRequest.class, com.google.cloud.datacatalog.v1.SetConfigRequest.Builder.class); } // Construct using com.google.cloud.datacatalog.v1.SetConfigRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; configurationCase_ = 0; configuration_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1.Datacatalog .internal_static_google_cloud_datacatalog_v1_SetConfigRequest_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1.SetConfigRequest getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1.SetConfigRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1.SetConfigRequest build() { com.google.cloud.datacatalog.v1.SetConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override 
public com.google.cloud.datacatalog.v1.SetConfigRequest buildPartial() { com.google.cloud.datacatalog.v1.SetConfigRequest result = new com.google.cloud.datacatalog.v1.SetConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.datacatalog.v1.SetConfigRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } } private void buildPartialOneofs(com.google.cloud.datacatalog.v1.SetConfigRequest result) { result.configurationCase_ = configurationCase_; result.configuration_ = this.configuration_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1.SetConfigRequest) { return mergeFrom((com.google.cloud.datacatalog.v1.SetConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1.SetConfigRequest other) { if (other == 
com.google.cloud.datacatalog.v1.SetConfigRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } switch (other.getConfigurationCase()) { case TAG_TEMPLATE_MIGRATION: { setTagTemplateMigrationValue(other.getTagTemplateMigrationValue()); break; } case CATALOG_UI_EXPERIENCE: { setCatalogUiExperienceValue(other.getCatalogUiExperienceValue()); break; } case CONFIGURATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { int rawValue = input.readEnum(); configurationCase_ = 2; configuration_ = rawValue; break; } // case 16 case 24: { int rawValue = input.readEnum(); configurationCase_ = 3; configuration_ = rawValue; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int configurationCase_ = 0; private java.lang.Object configuration_; public ConfigurationCase getConfigurationCase() { return ConfigurationCase.forNumber(configurationCase_); } public Builder clearConfiguration() { configurationCase_ = 0; configuration_ = null; onChanged(); return this; } private int bitField0_; private java.lang.Object name_ = 
""; /** * * * <pre> * Required. The organization or project whose config is being specified. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The organization or project whose config is being specified. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The organization or project whose config is being specified. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The organization or project whose config is being specified. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The organization or project whose config is being specified. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for name to set. 
* @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @return Whether the tagTemplateMigration field is set. */ @java.lang.Override public boolean hasTagTemplateMigration() { return configurationCase_ == 2; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @return The enum numeric value on the wire for tagTemplateMigration. */ @java.lang.Override public int getTagTemplateMigrationValue() { if (configurationCase_ == 2) { return ((java.lang.Integer) configuration_).intValue(); } return 0; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @param value The enum numeric value on the wire for tagTemplateMigration to set. * @return This builder for chaining. */ public Builder setTagTemplateMigrationValue(int value) { configurationCase_ = 2; configuration_ = value; onChanged(); return this; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @return The tagTemplateMigration. 
*/ @java.lang.Override public com.google.cloud.datacatalog.v1.TagTemplateMigration getTagTemplateMigration() { if (configurationCase_ == 2) { com.google.cloud.datacatalog.v1.TagTemplateMigration result = com.google.cloud.datacatalog.v1.TagTemplateMigration.forNumber( (java.lang.Integer) configuration_); return result == null ? com.google.cloud.datacatalog.v1.TagTemplateMigration.UNRECOGNIZED : result; } return com.google.cloud.datacatalog.v1.TagTemplateMigration .TAG_TEMPLATE_MIGRATION_UNSPECIFIED; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @param value The tagTemplateMigration to set. * @return This builder for chaining. */ public Builder setTagTemplateMigration( com.google.cloud.datacatalog.v1.TagTemplateMigration value) { if (value == null) { throw new NullPointerException(); } configurationCase_ = 2; configuration_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Opt-in status for the migration of Tag Templates to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 2;</code> * * @return This builder for chaining. */ public Builder clearTagTemplateMigration() { if (configurationCase_ == 2) { configurationCase_ = 0; configuration_ = null; onChanged(); } return this; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @return Whether the catalogUiExperience field is set. */ @java.lang.Override public boolean hasCatalogUiExperience() { return configurationCase_ == 3; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @return The enum numeric value on the wire for catalogUiExperience. 
*/ @java.lang.Override public int getCatalogUiExperienceValue() { if (configurationCase_ == 3) { return ((java.lang.Integer) configuration_).intValue(); } return 0; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @param value The enum numeric value on the wire for catalogUiExperience to set. * @return This builder for chaining. */ public Builder setCatalogUiExperienceValue(int value) { configurationCase_ = 3; configuration_ = value; onChanged(); return this; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @return The catalogUiExperience. */ @java.lang.Override public com.google.cloud.datacatalog.v1.CatalogUIExperience getCatalogUiExperience() { if (configurationCase_ == 3) { com.google.cloud.datacatalog.v1.CatalogUIExperience result = com.google.cloud.datacatalog.v1.CatalogUIExperience.forNumber( (java.lang.Integer) configuration_); return result == null ? com.google.cloud.datacatalog.v1.CatalogUIExperience.UNRECOGNIZED : result; } return com.google.cloud.datacatalog.v1.CatalogUIExperience.CATALOG_UI_EXPERIENCE_UNSPECIFIED; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @param value The catalogUiExperience to set. * @return This builder for chaining. */ public Builder setCatalogUiExperience( com.google.cloud.datacatalog.v1.CatalogUIExperience value) { if (value == null) { throw new NullPointerException(); } configurationCase_ = 3; configuration_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Opt-in status for the UI switch to Dataplex. * </pre> * * <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearCatalogUiExperience() { if (configurationCase_ == 3) { configurationCase_ = 0; configuration_ = null; onChanged(); } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.SetConfigRequest) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.SetConfigRequest) private static final com.google.cloud.datacatalog.v1.SetConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.SetConfigRequest(); } public static com.google.cloud.datacatalog.v1.SetConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SetConfigRequest> PARSER = new com.google.protobuf.AbstractParser<SetConfigRequest>() { @java.lang.Override public SetConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SetConfigRequest> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<SetConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1.SetConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ==== End of SetConfigRequest.java; a second, unrelated generated file is concatenated below. ====
// Dataset-join metadata for the following file (not Java source):
//   repo:        googleads/google-ads-java
//   size (bytes): 35,256
//   path:        google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/CustomerConversionGoal.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/resources/customer_conversion_goal.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.resources; /** * <pre> * Biddability control for conversion actions with a matching category and * origin. * </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.CustomerConversionGoal} */ public final class CustomerConversionGoal extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.resources.CustomerConversionGoal) CustomerConversionGoalOrBuilder { private static final long serialVersionUID = 0L; // Use CustomerConversionGoal.newBuilder() to construct. private CustomerConversionGoal(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomerConversionGoal() { resourceName_ = ""; category_ = 0; origin_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CustomerConversionGoal(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v19_resources_CustomerConversionGoal_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v19_resources_CustomerConversionGoal_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.CustomerConversionGoal.class, com.google.ads.googleads.v19.resources.CustomerConversionGoal.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. 
The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CATEGORY_FIELD_NUMBER = 2; private int category_ = 0; /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The enum numeric value on the wire for category. */ @java.lang.Override public int getCategoryValue() { return category_; } /** * <pre> * The conversion category of this customer conversion goal. 
Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The category. */ @java.lang.Override public com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory getCategory() { com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory result = com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory.forNumber(category_); return result == null ? com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNRECOGNIZED : result; } public static final int ORIGIN_FIELD_NUMBER = 3; private int origin_ = 0; /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The enum numeric value on the wire for origin. */ @java.lang.Override public int getOriginValue() { return origin_; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The origin. */ @java.lang.Override public com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin getOrigin() { com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin result = com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin.forNumber(origin_); return result == null ? 
com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin.UNRECOGNIZED : result; } public static final int BIDDABLE_FIELD_NUMBER = 4; private boolean biddable_ = false; /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @return The biddable. */ @java.lang.Override public boolean getBiddable() { return biddable_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (category_ != com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNSPECIFIED.getNumber()) { output.writeEnum(2, category_); } if (origin_ != com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin.UNSPECIFIED.getNumber()) { output.writeEnum(3, origin_); } if (biddable_ != false) { output.writeBool(4, biddable_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (category_ != com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, category_); } if (origin_ != com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream 
.computeEnumSize(3, origin_); } if (biddable_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, biddable_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.resources.CustomerConversionGoal)) { return super.equals(obj); } com.google.ads.googleads.v19.resources.CustomerConversionGoal other = (com.google.ads.googleads.v19.resources.CustomerConversionGoal) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (category_ != other.category_) return false; if (origin_ != other.origin_) return false; if (getBiddable() != other.getBiddable()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); hash = (37 * hash) + CATEGORY_FIELD_NUMBER; hash = (53 * hash) + category_; hash = (37 * hash) + ORIGIN_FIELD_NUMBER; hash = (53 * hash) + origin_; hash = (37 * hash) + BIDDABLE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getBiddable()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.resources.CustomerConversionGoal prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Biddability control for conversion actions with a matching category and * origin. 
* </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.CustomerConversionGoal} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.resources.CustomerConversionGoal) com.google.ads.googleads.v19.resources.CustomerConversionGoalOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v19_resources_CustomerConversionGoal_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v19_resources_CustomerConversionGoal_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.CustomerConversionGoal.class, com.google.ads.googleads.v19.resources.CustomerConversionGoal.Builder.class); } // Construct using com.google.ads.googleads.v19.resources.CustomerConversionGoal.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; category_ = 0; origin_ = 0; biddable_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v19_resources_CustomerConversionGoal_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.resources.CustomerConversionGoal getDefaultInstanceForType() { return com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance(); } @java.lang.Override public 
com.google.ads.googleads.v19.resources.CustomerConversionGoal build() { com.google.ads.googleads.v19.resources.CustomerConversionGoal result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.resources.CustomerConversionGoal buildPartial() { com.google.ads.googleads.v19.resources.CustomerConversionGoal result = new com.google.ads.googleads.v19.resources.CustomerConversionGoal(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.resources.CustomerConversionGoal result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.category_ = category_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.origin_ = origin_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.biddable_ = biddable_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) 
{ if (other instanceof com.google.ads.googleads.v19.resources.CustomerConversionGoal) { return mergeFrom((com.google.ads.googleads.v19.resources.CustomerConversionGoal)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.resources.CustomerConversionGoal other) { if (other == com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (other.category_ != 0) { setCategoryValue(other.getCategoryValue()); } if (other.origin_ != 0) { setOriginValue(other.getOriginValue()); } if (other.getBiddable() != false) { setBiddable(other.getBiddable()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { category_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { origin_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { biddable_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; 
private java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int category_ = 0; /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The enum numeric value on the wire for category. */ @java.lang.Override public int getCategoryValue() { return category_; } /** * <pre> * The conversion category of this customer conversion goal. 
Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @param value The enum numeric value on the wire for category to set. * @return This builder for chaining. */ public Builder setCategoryValue(int value) { category_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The category. */ @java.lang.Override public com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory getCategory() { com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory result = com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory.forNumber(category_); return result == null ? com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNRECOGNIZED : result; } /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @param value The category to set. * @return This builder for chaining. */ public Builder setCategory(com.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; category_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. 
* </pre> * * <code>.google.ads.googleads.v19.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return This builder for chaining. */ public Builder clearCategory() { bitField0_ = (bitField0_ & ~0x00000002); category_ = 0; onChanged(); return this; } private int origin_ = 0; /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The enum numeric value on the wire for origin. */ @java.lang.Override public int getOriginValue() { return origin_; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @param value The enum numeric value on the wire for origin to set. * @return This builder for chaining. */ public Builder setOriginValue(int value) { origin_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The origin. */ @java.lang.Override public com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin getOrigin() { com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin result = com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin.forNumber(origin_); return result == null ? com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin.UNRECOGNIZED : result; } /** * <pre> * The conversion origin of this customer conversion goal. 
Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @param value The origin to set. * @return This builder for chaining. */ public Builder setOrigin(com.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; origin_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v19.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return This builder for chaining. */ public Builder clearOrigin() { bitField0_ = (bitField0_ & ~0x00000004); origin_ = 0; onChanged(); return this; } private boolean biddable_ ; /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @return The biddable. */ @java.lang.Override public boolean getBiddable() { return biddable_; } /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @param value The biddable to set. * @return This builder for chaining. */ public Builder setBiddable(boolean value) { biddable_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @return This builder for chaining. 
*/ public Builder clearBiddable() { bitField0_ = (bitField0_ & ~0x00000008); biddable_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.resources.CustomerConversionGoal) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.resources.CustomerConversionGoal) private static final com.google.ads.googleads.v19.resources.CustomerConversionGoal DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.resources.CustomerConversionGoal(); } public static com.google.ads.googleads.v19.resources.CustomerConversionGoal getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomerConversionGoal> PARSER = new com.google.protobuf.AbstractParser<CustomerConversionGoal>() { @java.lang.Override public CustomerConversionGoal parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CustomerConversionGoal> parser() { return PARSER; } 
// NOTE(review): this is protoc-generated code (the sibling v20 copy of this class carries the
// standard "Generated by the protocol buffer compiler. DO NOT EDIT!" header; this v19 copy
// presumably does too — its file header is outside this chunk). Do not hand-edit; the correct
// change path is regenerating from the customer_conversion_goal .proto definition.

/** Returns the singleton {@code PARSER} used to decode {@code CustomerConversionGoal} messages. */
@java.lang.Override
public com.google.protobuf.Parser<CustomerConversionGoal> getParserForType() {
  return PARSER;
}

/**
 * Returns the shared {@code DEFAULT_INSTANCE} (created once in the class's static initializer);
 * all fields carry their proto3 defaults.
 */
@java.lang.Override
public com.google.ads.googleads.v19.resources.CustomerConversionGoal getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
googleads/google-ads-java
35,256
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/CustomerConversionGoal.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/resources/customer_conversion_goal.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.resources; /** * <pre> * Biddability control for conversion actions with a matching category and * origin. * </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.CustomerConversionGoal} */ public final class CustomerConversionGoal extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.resources.CustomerConversionGoal) CustomerConversionGoalOrBuilder { private static final long serialVersionUID = 0L; // Use CustomerConversionGoal.newBuilder() to construct. private CustomerConversionGoal(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomerConversionGoal() { resourceName_ = ""; category_ = 0; origin_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CustomerConversionGoal(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v20_resources_CustomerConversionGoal_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v20_resources_CustomerConversionGoal_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.CustomerConversionGoal.class, com.google.ads.googleads.v20.resources.CustomerConversionGoal.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. 
The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CATEGORY_FIELD_NUMBER = 2; private int category_ = 0; /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The enum numeric value on the wire for category. */ @java.lang.Override public int getCategoryValue() { return category_; } /** * <pre> * The conversion category of this customer conversion goal. 
Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The category. */ @java.lang.Override public com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory getCategory() { com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory result = com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory.forNumber(category_); return result == null ? com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNRECOGNIZED : result; } public static final int ORIGIN_FIELD_NUMBER = 3; private int origin_ = 0; /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The enum numeric value on the wire for origin. */ @java.lang.Override public int getOriginValue() { return origin_; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The origin. */ @java.lang.Override public com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin getOrigin() { com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin result = com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin.forNumber(origin_); return result == null ? 
com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin.UNRECOGNIZED : result; } public static final int BIDDABLE_FIELD_NUMBER = 4; private boolean biddable_ = false; /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @return The biddable. */ @java.lang.Override public boolean getBiddable() { return biddable_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (category_ != com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNSPECIFIED.getNumber()) { output.writeEnum(2, category_); } if (origin_ != com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin.UNSPECIFIED.getNumber()) { output.writeEnum(3, origin_); } if (biddable_ != false) { output.writeBool(4, biddable_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (category_ != com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, category_); } if (origin_ != com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream 
.computeEnumSize(3, origin_); } if (biddable_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, biddable_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.resources.CustomerConversionGoal)) { return super.equals(obj); } com.google.ads.googleads.v20.resources.CustomerConversionGoal other = (com.google.ads.googleads.v20.resources.CustomerConversionGoal) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (category_ != other.category_) return false; if (origin_ != other.origin_) return false; if (getBiddable() != other.getBiddable()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); hash = (37 * hash) + CATEGORY_FIELD_NUMBER; hash = (53 * hash) + category_; hash = (37 * hash) + ORIGIN_FIELD_NUMBER; hash = (53 * hash) + origin_; hash = (37 * hash) + BIDDABLE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getBiddable()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
// NOTE(review): protoc-generated code (DO NOT EDIT by hand). This span begins
// mid-method: the opening of parseFrom(ByteBuffer, ExtensionRegistryLite) is
// above this view; only its final argument and closing brace appear here.
extensionRegistry);
  }
  // Parses a message from a ByteString; unknown extensions are preserved as unknown fields.
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream overloads: IOExceptions from the underlying stream propagate unchanged.
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited form: the message is preceded by a varint length prefix on the stream.
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  // Builder factories: all builders start from the (immutable) default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v20.resources.CustomerConversionGoal prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; anything else seeds the builder with its fields.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Biddability control for conversion actions with a matching category and
   * origin.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.resources.CustomerConversionGoal}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.resources.CustomerConversionGoal)
      com.google.ads.googleads.v20.resources.CustomerConversionGoalOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v20_resources_CustomerConversionGoal_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v20_resources_CustomerConversionGoal_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.resources.CustomerConversionGoal.class, com.google.ads.googleads.v20.resources.CustomerConversionGoal.Builder.class);
    }

    // Construct using com.google.ads.googleads.v20.resources.CustomerConversionGoal.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    // Resets every field to its proto3 default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      resourceName_ = "";
      category_ = 0;
      origin_ = 0;
      biddable_ = false;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v20_resources_CustomerConversionGoal_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.resources.CustomerConversionGoal getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.resources.CustomerConversionGoal build() {
      com.google.ads.googleads.v20.resources.CustomerConversionGoal result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.resources.CustomerConversionGoal buildPartial() {
      com.google.ads.googleads.v20.resources.CustomerConversionGoal result = new com.google.ads.googleads.v20.resources.CustomerConversionGoal(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    // Copies into the result only those fields whose presence bit is set in bitField0_.
    private void buildPartial0(com.google.ads.googleads.v20.resources.CustomerConversionGoal result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resourceName_ = resourceName_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.category_ = category_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.origin_ = origin_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.biddable_ = biddable_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic-dispatch merge: routes to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.resources.CustomerConversionGoal) {
        return mergeFrom((com.google.ads.googleads.v20.resources.CustomerConversionGoal)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: proto3 semantics — only non-default fields of `other` overwrite.
    public Builder mergeFrom(com.google.ads.googleads.v20.resources.CustomerConversionGoal other) {
      if (other == com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.category_ != 0) {
        setCategoryValue(other.getCategoryValue());
      }
      if (other.origin_ != 0) {
        setOriginValue(other.getOriginValue());
      }
      if (other.getBiddable() != false) {
        setBiddable(other.getBiddable());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop; tag values 10/16/24/32 correspond to fields 1-4.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              resourceName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 16: {
              category_ = input.readEnum();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
            case 24: {
              origin_ = input.readEnum();
              bitField0_ |= 0x00000004;
              break;
            } // case 24
            case 32: {
              biddable_ = input.readBool();
              bitField0_ |= 0x00000008;
              break;
            } // case 32
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 resourceName, 0x2 category, 0x4 origin, 0x8 biddable.
    private int bitField0_;
    // Stored as String or ByteString; lazily converted to String on first read.
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Immutable. The resource name of the customer conversion goal.
     * Customer conversion goal resource names have the form:
     *
     * `customers/{customer_id}/customerConversionGoals/{category}~{origin}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip the UTF-8 decode.
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Returns the resource name as UTF-8 bytes, caching the ByteString form.
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets the resource name (see {@link #getResourceName()} for the format).
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Clears the resource name back to its default (empty string).
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * Sets the resource name from UTF-8 bytes; rejects invalid UTF-8.
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    // Enum stored by wire number so unrecognized values survive round-trips.
    private int category_ = 0;
    /**
     * <pre>
     * The conversion category of this customer conversion goal. Only
     * conversion actions that have this category will be included in this goal.
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code>
     * @return The enum numeric value on the wire for category.
     */
    @java.lang.Override public int getCategoryValue() {
      return category_;
    }
    /**
     * Sets the category by raw wire number (no validation against known values).
     *
     * <code>.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code>
     * @param value The enum numeric value on the wire for category to set.
     * @return This builder for chaining.
     */
    public Builder setCategoryValue(int value) {
      category_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Returns the category as an enum; unknown wire values map to UNRECOGNIZED.
     *
     * <code>.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code>
     * @return The category.
     */
    @java.lang.Override
    public com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory getCategory() {
      com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory result = com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory.forNumber(category_);
      return result == null ? com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNRECOGNIZED : result;
    }
    /**
     * Sets the category from a typed enum value.
     *
     * <code>.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code>
     * @param value The category to set.
     * @return This builder for chaining.
     */
    public Builder setCategory(com.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory value) {
      if (value == null) { throw new NullPointerException(); }
      bitField0_ |= 0x00000002;
      category_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * Clears the category back to its default (0 / UNSPECIFIED).
     *
     * <code>.google.ads.googleads.v20.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearCategory() {
      bitField0_ = (bitField0_ & ~0x00000002);
      category_ = 0;
      onChanged();
      return this;
    }

    // Enum stored by wire number, same convention as category_.
    private int origin_ = 0;
    /**
     * <pre>
     * The conversion origin of this customer conversion goal. Only
     * conversion actions that have this conversion origin will be included in
     * this goal.
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code>
     * @return The enum numeric value on the wire for origin.
     */
    @java.lang.Override public int getOriginValue() {
      return origin_;
    }
    /**
     * Sets the origin by raw wire number (no validation against known values).
     *
     * <code>.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code>
     * @param value The enum numeric value on the wire for origin to set.
     * @return This builder for chaining.
     */
    public Builder setOriginValue(int value) {
      origin_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * Returns the origin as an enum; unknown wire values map to UNRECOGNIZED.
     *
     * <code>.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code>
     * @return The origin.
     */
    @java.lang.Override
    public com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin getOrigin() {
      com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin result = com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin.forNumber(origin_);
      return result == null ? com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin.UNRECOGNIZED : result;
    }
    /**
     * Sets the origin from a typed enum value.
     *
     * <code>.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code>
     * @param value The origin to set.
     * @return This builder for chaining.
     */
    public Builder setOrigin(com.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin value) {
      if (value == null) { throw new NullPointerException(); }
      bitField0_ |= 0x00000004;
      origin_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * Clears the origin back to its default (0 / UNSPECIFIED).
     *
     * <code>.google.ads.googleads.v20.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code>
     * @return This builder for chaining.
     */
    public Builder clearOrigin() {
      bitField0_ = (bitField0_ & ~0x00000004);
      origin_ = 0;
      onChanged();
      return this;
    }

    private boolean biddable_ ;
    /**
     * <pre>
     * The biddability of the customer conversion goal.
     * </pre>
     *
     * <code>bool biddable = 4;</code>
     * @return The biddable.
     */
    @java.lang.Override
    public boolean getBiddable() {
      return biddable_;
    }
    /**
     * Sets the biddability flag.
     *
     * <code>bool biddable = 4;</code>
     * @param value The biddable to set.
     * @return This builder for chaining.
     */
    public Builder setBiddable(boolean value) {
      biddable_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     * Clears the biddability flag back to its default (false).
     *
     * <code>bool biddable = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearBiddable() {
      bitField0_ = (bitField0_ & ~0x00000008);
      biddable_ = false;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.resources.CustomerConversionGoal)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.resources.CustomerConversionGoal)
  // Shared immutable default instance; also the seed for every new builder.
  private static final com.google.ads.googleads.v20.resources.CustomerConversionGoal DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.resources.CustomerConversionGoal();
  }

  public static com.google.ads.googleads.v20.resources.CustomerConversionGoal getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Low-level parser used by all parseFrom overloads; attaches the partially
  // parsed message to any exception so callers can inspect it.
  private static final com.google.protobuf.Parser<CustomerConversionGoal>
      PARSER = new com.google.protobuf.AbstractParser<CustomerConversionGoal>() {
    @java.lang.Override
    public CustomerConversionGoal parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<CustomerConversionGoal> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CustomerConversionGoal> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v20.resources.CustomerConversionGoal getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
googleads/google-ads-java
35,256
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/resources/CustomerConversionGoal.java
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/resources/customer_conversion_goal.proto
// Protobuf Java Version: 3.25.7

package com.google.ads.googleads.v21.resources;

/**
 * <pre>
 * Biddability control for conversion actions with a matching category and
 * origin.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v21.resources.CustomerConversionGoal}
 */
public final class CustomerConversionGoal extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.resources.CustomerConversionGoal)
    CustomerConversionGoalOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CustomerConversionGoal.newBuilder() to construct.
  private CustomerConversionGoal(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes proto3 defaults; used by newInstance/DEFAULT_INSTANCE.
  private CustomerConversionGoal() {
    resourceName_ = "";
    category_ = 0;
    origin_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CustomerConversionGoal();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v21_resources_CustomerConversionGoal_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v21_resources_CustomerConversionGoal_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.resources.CustomerConversionGoal.class, com.google.ads.googleads.v21.resources.CustomerConversionGoal.Builder.class);
  }

  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  // Stored as String or ByteString; lazily converted to String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object resourceName_ = "";
  /**
   * <pre>
   * Immutable. The resource name of the customer conversion goal.
   * Customer conversion goal resource names have the form:
   *
   * `customers/{customer_id}/customerConversionGoals/{category}~{origin}`
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip the UTF-8 decode.
      resourceName_ = s;
      return s;
    }
  }
  /**
   * Returns the resource name as UTF-8 bytes, caching the ByteString form.
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int CATEGORY_FIELD_NUMBER = 2;
  // Enum stored by wire number so unrecognized values survive round-trips.
  private int category_ = 0;
  /**
   * <pre>
   * The conversion category of this customer conversion goal. Only
   * conversion actions that have this category will be included in this goal.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code>
   * @return The enum numeric value on the wire for category.
   */
  @java.lang.Override public int getCategoryValue() {
    return category_;
  }
  /**
   * Returns the category as an enum; unknown wire values map to UNRECOGNIZED.
   *
   * <code>.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code>
   * @return The category.
   */
  @java.lang.Override public com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory getCategory() {
    com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory result = com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory.forNumber(category_);
    return result == null ? com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNRECOGNIZED : result;
  }

  public static final int ORIGIN_FIELD_NUMBER = 3;
  // Enum stored by wire number, same convention as category_.
  private int origin_ = 0;
  /**
   * <pre>
   * The conversion origin of this customer conversion goal. Only
   * conversion actions that have this conversion origin will be included in
   * this goal.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code>
   * @return The enum numeric value on the wire for origin.
   */
  @java.lang.Override public int getOriginValue() {
    return origin_;
  }
  /**
   * Returns the origin as an enum; unknown wire values map to UNRECOGNIZED.
   *
   * <code>.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code>
   * @return The origin.
   */
  @java.lang.Override public com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin getOrigin() {
    com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin result = com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin.forNumber(origin_);
    return result == null ? com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin.UNRECOGNIZED : result;
  }

  public static final int BIDDABLE_FIELD_NUMBER = 4;
  private boolean biddable_ = false;
  /**
   * <pre>
   * The biddability of the customer conversion goal.
   * </pre>
   *
   * <code>bool biddable = 4;</code>
   * @return The biddable.
   */
  @java.lang.Override
  public boolean getBiddable() {
    return biddable_;
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Proto3 serialization: default-valued fields are skipped on the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    if (category_ != com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNSPECIFIED.getNumber()) {
      output.writeEnum(2, category_);
    }
    if (origin_ != com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin.UNSPECIFIED.getNumber()) {
      output.writeEnum(3, origin_);
    }
    if (biddable_ != false) {
      output.writeBool(4, biddable_);
    }
    getUnknownFields().writeTo(output);
  }

  // Mirrors writeTo: sums the encoded size of each non-default field; memoized.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    if (category_ != com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, category_);
    }
    if (origin_ != com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(3, origin_);
    }
    if (biddable_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(4, biddable_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-wise equality including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.resources.CustomerConversionGoal)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.resources.CustomerConversionGoal other = (com.google.ads.googleads.v21.resources.CustomerConversionGoal) obj;

    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (category_ != other.category_) return false;
    if (origin_ != other.origin_) return false;
    if (getBiddable()
        != other.getBiddable()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash over descriptor + every field, memoized after first computation.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (37 * hash) + CATEGORY_FIELD_NUMBER;
    hash = (53 * hash) + category_;
    hash = (37 * hash) + ORIGIN_FIELD_NUMBER;
    hash = (53 * hash) + origin_;
    hash = (37 * hash) + BIDDABLE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getBiddable());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parse entry points; all delegate to PARSER.
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream overloads: IOExceptions from the underlying stream propagate unchanged.
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited form: the message is preceded by a varint length prefix on the stream.
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.CustomerConversionGoal parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  // Builder factories: all builders start from the (immutable) default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.resources.CustomerConversionGoal prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; anything else seeds the builder with its fields.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Biddability control for conversion actions with a matching category and
   * origin.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.resources.CustomerConversionGoal}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.resources.CustomerConversionGoal)
      com.google.ads.googleads.v21.resources.CustomerConversionGoalOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v21_resources_CustomerConversionGoal_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v21_resources_CustomerConversionGoal_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.resources.CustomerConversionGoal.class, com.google.ads.googleads.v21.resources.CustomerConversionGoal.Builder.class);
    }

    // Construct using com.google.ads.googleads.v21.resources.CustomerConversionGoal.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    // Resets every field to its proto3 default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      resourceName_ = "";
      category_ = 0;
      origin_ = 0;
      biddable_ = false;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.resources.CustomerConversionGoalProto.internal_static_google_ads_googleads_v21_resources_CustomerConversionGoal_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerConversionGoal getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerConversionGoal build() {
      com.google.ads.googleads.v21.resources.CustomerConversionGoal result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerConversionGoal buildPartial() {
      com.google.ads.googleads.v21.resources.CustomerConversionGoal result = new com.google.ads.googleads.v21.resources.CustomerConversionGoal(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    // Copies into the result only those fields whose presence bit is set in bitField0_.
    private void buildPartial0(com.google.ads.googleads.v21.resources.CustomerConversionGoal result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resourceName_ = resourceName_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.category_ = category_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.origin_ = origin_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.biddable_ = biddable_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // NOTE(review): method body continues beyond this view; signature left dangling intentionally.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other)
{ if (other instanceof com.google.ads.googleads.v21.resources.CustomerConversionGoal) { return mergeFrom((com.google.ads.googleads.v21.resources.CustomerConversionGoal)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.resources.CustomerConversionGoal other) { if (other == com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (other.category_ != 0) { setCategoryValue(other.getCategoryValue()); } if (other.origin_ != 0) { setOriginValue(other.getOriginValue()); } if (other.getBiddable() != false) { setBiddable(other.getBiddable()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { category_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { origin_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { biddable_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; 
private java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Immutable. The resource name of the customer conversion goal. * Customer conversion goal resource names have the form: * * `customers/{customer_id}/customerConversionGoals/{category}~{origin}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int category_ = 0; /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The enum numeric value on the wire for category. */ @java.lang.Override public int getCategoryValue() { return category_; } /** * <pre> * The conversion category of this customer conversion goal. 
Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @param value The enum numeric value on the wire for category to set. * @return This builder for chaining. */ public Builder setCategoryValue(int value) { category_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return The category. */ @java.lang.Override public com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory getCategory() { com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory result = com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory.forNumber(category_); return result == null ? com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory.UNRECOGNIZED : result; } /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @param value The category to set. * @return This builder for chaining. */ public Builder setCategory(com.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; category_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The conversion category of this customer conversion goal. Only * conversion actions that have this category will be included in this goal. 
* </pre> * * <code>.google.ads.googleads.v21.enums.ConversionActionCategoryEnum.ConversionActionCategory category = 2;</code> * @return This builder for chaining. */ public Builder clearCategory() { bitField0_ = (bitField0_ & ~0x00000002); category_ = 0; onChanged(); return this; } private int origin_ = 0; /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The enum numeric value on the wire for origin. */ @java.lang.Override public int getOriginValue() { return origin_; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @param value The enum numeric value on the wire for origin to set. * @return This builder for chaining. */ public Builder setOriginValue(int value) { origin_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return The origin. */ @java.lang.Override public com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin getOrigin() { com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin result = com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin.forNumber(origin_); return result == null ? com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin.UNRECOGNIZED : result; } /** * <pre> * The conversion origin of this customer conversion goal. 
Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @param value The origin to set. * @return This builder for chaining. */ public Builder setOrigin(com.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; origin_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The conversion origin of this customer conversion goal. Only * conversion actions that have this conversion origin will be included in * this goal. * </pre> * * <code>.google.ads.googleads.v21.enums.ConversionOriginEnum.ConversionOrigin origin = 3;</code> * @return This builder for chaining. */ public Builder clearOrigin() { bitField0_ = (bitField0_ & ~0x00000004); origin_ = 0; onChanged(); return this; } private boolean biddable_ ; /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @return The biddable. */ @java.lang.Override public boolean getBiddable() { return biddable_; } /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @param value The biddable to set. * @return This builder for chaining. */ public Builder setBiddable(boolean value) { biddable_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * The biddability of the customer conversion goal. * </pre> * * <code>bool biddable = 4;</code> * @return This builder for chaining. 
*/ public Builder clearBiddable() { bitField0_ = (bitField0_ & ~0x00000008); biddable_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.resources.CustomerConversionGoal) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.resources.CustomerConversionGoal) private static final com.google.ads.googleads.v21.resources.CustomerConversionGoal DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.resources.CustomerConversionGoal(); } public static com.google.ads.googleads.v21.resources.CustomerConversionGoal getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomerConversionGoal> PARSER = new com.google.protobuf.AbstractParser<CustomerConversionGoal>() { @java.lang.Override public CustomerConversionGoal parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CustomerConversionGoal> parser() { return PARSER; } 
@java.lang.Override public com.google.protobuf.Parser<CustomerConversionGoal> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.resources.CustomerConversionGoal getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,112
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateSpecialistPoolRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/specialist_pool_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Request message for * [SpecialistPoolService.UpdateSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.UpdateSpecialistPool]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest} */ public final class UpdateSpecialistPoolRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest) UpdateSpecialistPoolRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateSpecialistPoolRequest.newBuilder() to construct. 
private UpdateSpecialistPoolRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateSpecialistPoolRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateSpecialistPoolRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateSpecialistPoolRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateSpecialistPoolRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest.class, com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest.Builder.class); } private int bitField0_; public static final int SPECIALIST_POOL_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1beta1.SpecialistPool specialistPool_; /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the specialistPool field is set. */ @java.lang.Override public boolean hasSpecialistPool() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The specialistPool. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.SpecialistPool getSpecialistPool() { return specialistPool_ == null ? 
com.google.cloud.aiplatform.v1beta1.SpecialistPool.getDefaultInstance() : specialistPool_; } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.SpecialistPoolOrBuilder getSpecialistPoolOrBuilder() { return specialistPool_ == null ? com.google.cloud.aiplatform.v1beta1.SpecialistPool.getDefaultInstance() : specialistPool_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getSpecialistPool()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSpecialistPool()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest other = (com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest) obj; if (hasSpecialistPool() != other.hasSpecialistPool()) return false; if (hasSpecialistPool()) { if (!getSpecialistPool().equals(other.getSpecialistPool())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 
41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSpecialistPool()) { hash = (37 * hash) + SPECIALIST_POOL_FIELD_NUMBER; hash = (53 * hash) + getSpecialistPool().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( java.io.InputStream input) 
throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [SpecialistPoolService.UpdateSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.UpdateSpecialistPool]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest) com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateSpecialistPoolRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateSpecialistPoolRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest.class, com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSpecialistPoolFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder 
clear() { super.clear(); bitField0_ = 0; specialistPool_ = null; if (specialistPoolBuilder_ != null) { specialistPoolBuilder_.dispose(); specialistPoolBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateSpecialistPoolRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest build() { com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest buildPartial() { com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest result = new com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.specialistPool_ = specialistPoolBuilder_ == null ? specialistPool_ : specialistPoolBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest other) { if (other == com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest.getDefaultInstance()) return this; if (other.hasSpecialistPool()) { mergeSpecialistPool(other.getSpecialistPool()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getSpecialistPoolFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1beta1.SpecialistPool specialistPool_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.SpecialistPool, com.google.cloud.aiplatform.v1beta1.SpecialistPool.Builder, com.google.cloud.aiplatform.v1beta1.SpecialistPoolOrBuilder> specialistPoolBuilder_; /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the specialistPool field is set. */ public boolean hasSpecialistPool() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The specialistPool. */ public com.google.cloud.aiplatform.v1beta1.SpecialistPool getSpecialistPool() { if (specialistPoolBuilder_ == null) { return specialistPool_ == null ? 
com.google.cloud.aiplatform.v1beta1.SpecialistPool.getDefaultInstance() : specialistPool_; } else { return specialistPoolBuilder_.getMessage(); } } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSpecialistPool(com.google.cloud.aiplatform.v1beta1.SpecialistPool value) { if (specialistPoolBuilder_ == null) { if (value == null) { throw new NullPointerException(); } specialistPool_ = value; } else { specialistPoolBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSpecialistPool( com.google.cloud.aiplatform.v1beta1.SpecialistPool.Builder builderForValue) { if (specialistPoolBuilder_ == null) { specialistPool_ = builderForValue.build(); } else { specialistPoolBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeSpecialistPool(com.google.cloud.aiplatform.v1beta1.SpecialistPool value) { if (specialistPoolBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && specialistPool_ != null && specialistPool_ != com.google.cloud.aiplatform.v1beta1.SpecialistPool.getDefaultInstance()) { getSpecialistPoolBuilder().mergeFrom(value); } else { specialistPool_ = value; } } else { specialistPoolBuilder_.mergeFrom(value); } if (specialistPool_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearSpecialistPool() { bitField0_ = (bitField0_ & ~0x00000001); specialistPool_ = null; if (specialistPoolBuilder_ != null) { specialistPoolBuilder_.dispose(); specialistPoolBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.SpecialistPool.Builder getSpecialistPoolBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSpecialistPoolFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.SpecialistPoolOrBuilder getSpecialistPoolOrBuilder() { if (specialistPoolBuilder_ != null) { return specialistPoolBuilder_.getMessageOrBuilder(); } else { return specialistPool_ == null ? com.google.cloud.aiplatform.v1beta1.SpecialistPool.getDefaultInstance() : specialistPool_; } } /** * * * <pre> * Required. The SpecialistPool which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SpecialistPool specialist_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.SpecialistPool, com.google.cloud.aiplatform.v1beta1.SpecialistPool.Builder, com.google.cloud.aiplatform.v1beta1.SpecialistPoolOrBuilder> getSpecialistPoolFieldBuilder() { if (specialistPoolBuilder_ == null) { specialistPoolBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.SpecialistPool, com.google.cloud.aiplatform.v1beta1.SpecialistPool.Builder, com.google.cloud.aiplatform.v1beta1.SpecialistPoolOrBuilder>( getSpecialistPool(), getParentForChildren(), isClean()); specialistPool_ = null; } return specialistPoolBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. 
The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The update mask applies to the resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The update mask applies to the resource. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest) private static final com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest(); } public static com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateSpecialistPoolRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateSpecialistPoolRequest>() { @java.lang.Override public UpdateSpecialistPoolRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateSpecialistPoolRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateSpecialistPoolRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateSpecialistPoolRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/zeppelin
35,018
zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.notebook; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.common.JsonSerializable; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.display.AngularObject; import org.apache.zeppelin.display.AngularObjectRegistry; import org.apache.zeppelin.display.Input; import org.apache.zeppelin.interpreter.ExecutionContext; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterFactory; import org.apache.zeppelin.interpreter.InterpreterGroup; import org.apache.zeppelin.interpreter.InterpreterNotFoundException; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.InterpreterSettingManager; import org.apache.zeppelin.interpreter.ManagedInterpreterGroup; import org.apache.zeppelin.interpreter.remote.RemoteAngularObject; import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.notebook.utility.IdHashes; import 
org.apache.zeppelin.scheduler.ExecutorFactory; import org.apache.zeppelin.scheduler.Job.Status; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.user.Credentials; import org.apache.zeppelin.util.Util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * Represent the note of Zeppelin. All the note and its paragraph operations are done * via this class. */ public class Note implements JsonSerializable { private static final Logger LOGGER = LoggerFactory.getLogger(Note.class); private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"); private CopyOnWriteArrayList<Paragraph> paragraphs = new CopyOnWriteArrayList<>(); private String name = ""; private String id; private String defaultInterpreterGroup; private String version; private Map<String, Object> noteParams = new LinkedHashMap<>(); private Map<String, Input> noteForms = new LinkedHashMap<>(); private Map<String, List<AngularObject>> angularObjects = new HashMap<>(); /* * note configurations. * - looknfeel - cron */ private Map<String, Object> config = new HashMap<>(); /* * note information. * - cron : cron expression validity. */ private Map<String, Object> info = new HashMap<>(); // The front end needs to judge TRASH_FOLDER according to the path, // But it doesn't need to be saved in note json. So we will exclude this when saving // note to NotebookRepo. 
private String path; /********************************** transient fields ******************************************/ /* * Do not use the fair algorithm, because it blocks read accesses when a write access is waiting. * We have read accesses from different threads, which are dependent on each other. * The fair behavior can therefore create a DeadLock. */ private transient final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(false); private transient boolean removed = false; private transient InterpreterFactory interpreterFactory; private transient InterpreterSettingManager interpreterSettingManager; private transient ParagraphJobListener paragraphJobListener; private transient List<NoteEventListener> noteEventListeners = new ArrayList<>(); private transient Credentials credentials; private transient ZeppelinConfiguration zConf; private transient NoteParser noteParser; public Note() { generateId(); } public Note(String path, String defaultInterpreterGroup, InterpreterFactory factory, InterpreterSettingManager interpreterSettingManager, ParagraphJobListener paragraphJobListener, Credentials credentials, List<NoteEventListener> noteEventListener, ZeppelinConfiguration zConf, NoteParser noteParser) { setPath(path); this.defaultInterpreterGroup = defaultInterpreterGroup; this.interpreterFactory = factory; this.interpreterSettingManager = interpreterSettingManager; this.paragraphJobListener = paragraphJobListener; this.noteEventListeners = noteEventListener; this.credentials = credentials; this.zConf = zConf; this.noteParser = noteParser; this.version = Util.getVersion(); generateId(); setCronSupported(zConf); } public String getPath() { return path; } public String getParentPath() { int pos = path.lastIndexOf('/'); if (pos == 0) { return "/"; } else { return path.substring(0, pos); } } private String getName(String path) { int pos = path.lastIndexOf('/'); return path.substring(pos + 1); } private void generateId() { id = IdHashes.generateId(); } public boolean 
isParagraphRunning() { if (paragraphs != null) { for (Paragraph p : paragraphs) { if (p.isRunning()) { return true; } } } return false; } public boolean isPersonalizedMode() { Object v = getConfig().get("personalizedMode"); return null != v && "true".equals(v); } public void setPersonalizedMode(Boolean value) { String valueString; if (value.booleanValue()) { valueString = "true"; } else { valueString = "false"; } config.put("personalizedMode", valueString); clearUserParagraphs(value); } private void clearUserParagraphs(boolean isPersonalized) { if (!isPersonalized) { for (Paragraph p : paragraphs) { p.clearUserParagraphs(); } } } public String getId() { return id; } @VisibleForTesting public void setId(String id) { this.id = id; } public String getName() { return name; } public void setPath(String path) { if (!path.startsWith("/")) { this.path = "/" + path; } else { this.path = path; } this.name = getName(path); } public void setVersion(String version) { this.version = version; } public String getDefaultInterpreterGroup() { if (StringUtils.isBlank(defaultInterpreterGroup)) { defaultInterpreterGroup = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT); } return defaultInterpreterGroup; } public void setDefaultInterpreterGroup(String defaultInterpreterGroup) { this.defaultInterpreterGroup = defaultInterpreterGroup; } public Map<String, Object> getNoteParams() { return noteParams; } public void setNoteParams(Map<String, Object> noteParams) { this.noteParams = noteParams; } public Map<String, Input> getNoteForms() { return noteForms; } public void setNoteForms(Map<String, Input> noteForms) { this.noteForms = noteForms; } public void setName(String name) { this.name = name; // for the notes before 0.9, get path from name. 
if (this.path == null) { if (name.startsWith("/")) { this.path = name; } else { this.path = "/" + name; } } else { int pos = this.path.lastIndexOf('/'); this.path = this.path.substring(0, pos + 1) + this.name; } } public InterpreterFactory getInterpreterFactory() { return interpreterFactory; } public void setInterpreterFactory(InterpreterFactory interpreterFactory) { this.interpreterFactory = interpreterFactory; } void setInterpreterSettingManager(InterpreterSettingManager interpreterSettingManager) { this.interpreterSettingManager = interpreterSettingManager; } InterpreterSettingManager getInterpreterSettingManager() { return this.interpreterSettingManager; } void setParagraphJobListener(ParagraphJobListener paragraphJobListener) { this.paragraphJobListener = paragraphJobListener; } public boolean isCronSupported(ZeppelinConfiguration zConf) { if (zConf.isZeppelinNotebookCronEnable()) { zConf.getZeppelinNotebookCronFolders(); if (StringUtils.isBlank(zConf.getZeppelinNotebookCronFolders())) { return true; } else { for (String folder : zConf.getZeppelinNotebookCronFolders().split(",")) { if (this.path.startsWith(folder)) { return true; } } } } return false; } public void setCronSupported(ZeppelinConfiguration zConf) { getConfig().put("isZeppelinNotebookCronEnable", isCronSupported(zConf)); } public Credentials getCredentials() { return credentials; } public void setCredentials(Credentials credentials) { this.credentials = credentials; } Map<String, List<AngularObject>> getAngularObjects() { return angularObjects; } public List<AngularObject> getAngularObjects(String intpGroupId) { if (!angularObjects.containsKey(intpGroupId)) { return new ArrayList<>(); } return angularObjects.get(intpGroupId); } /** * Add or update the note AngularObject. 
*/
public void addOrUpdateAngularObject(String intpGroupId, AngularObject angularObject) {
  List<AngularObject> angularObjectList;
  if (!angularObjects.containsKey(intpGroupId)) {
    // First object for this interpreter group: create its bucket.
    angularObjectList = new ArrayList<>();
    angularObjects.put(intpGroupId, angularObjectList);
  } else {
    angularObjectList = angularObjects.get(intpGroupId);
    // Delete existing AngularObject with the same (noteId, paragraphId, name) so the
    // add below acts as an upsert.
    // NOTE(review): the list is typed List<AngularObject>, so `instanceof AngularObject`
    // matches every non-null element; if RemoteAngularObject extends AngularObject the
    // `else if` and `else` branches below look unreachable — confirm before simplifying.
    Iterator<AngularObject> iter = angularObjectList.iterator();
    while (iter.hasNext()) {
      String noteId = "";
      String paragraphId = "";
      String name = "";
      Object object = iter.next();
      if (object instanceof AngularObject) {
        AngularObject ao = (AngularObject) object;
        noteId = ao.getNoteId();
        paragraphId = ao.getParagraphId();
        name = ao.getName();
      } else if (object instanceof RemoteAngularObject) {
        RemoteAngularObject rao = (RemoteAngularObject) object;
        noteId = rao.getNoteId();
        paragraphId = rao.getParagraphId();
        name = rao.getName();
      } else {
        continue;
      }
      if (StringUtils.equals(noteId, angularObject.getNoteId())
          && StringUtils.equals(paragraphId, angularObject.getParagraphId())
          && StringUtils.equals(name, angularObject.getName())) {
        iter.remove();
      }
    }
  }
  angularObjectList.add(angularObject);
}

/**
 * Delete the note AngularObject.
 *
 * Removes every object in the given interpreter group whose (noteId, paragraphId, name)
 * triple matches; no-op when the group id is unknown.
 */
public void deleteAngularObject(String intpGroupId, String noteId, String paragraphId,
                                String name) {
  if (angularObjects.containsKey(intpGroupId)) {
    // Delete existing AngularObject
    Iterator<AngularObject> iter = angularObjects.get(intpGroupId).iterator();
    while (iter.hasNext()) {
      String noteIdCandidate = "";
      String paragraphIdCandidate = "";
      String nameCandidate = "";
      Object object = iter.next();
      if (object instanceof AngularObject) {
        AngularObject ao = (AngularObject) object;
        noteIdCandidate = ao.getNoteId();
        paragraphIdCandidate = ao.getParagraphId();
        nameCandidate = ao.getName();
      } else if (object instanceof RemoteAngularObject) {
        RemoteAngularObject rao = (RemoteAngularObject) object;
        noteIdCandidate = rao.getNoteId();
        paragraphIdCandidate = rao.getParagraphId();
        nameCandidate = rao.getName();
      } else {
        continue;
      }
      if (StringUtils.equals(noteId, noteIdCandidate)
          && StringUtils.equals(paragraphId, paragraphIdCandidate)
          && StringUtils.equals(name, nameCandidate)) {
        iter.remove();
      }
    }
  }
}

/**
 * Create a new paragraph and add it to the end of the note.
 */
public Paragraph addNewParagraph(AuthenticationInfo authenticationInfo) {
  return insertNewParagraph(paragraphs.size(), authenticationInfo);
}

/**
 * Clone paragraph and add it to note.
*
 * @param srcParagraph source paragraph
 */
void addCloneParagraph(Paragraph srcParagraph, AuthenticationInfo subject) {
  // Keep paragraph original ID
  Paragraph newParagraph = new Paragraph(srcParagraph.getId(), this, paragraphJobListener);

  // Copy config/params/forms/text/title from the source; config is defensively copied,
  // params and forms are shared references from the source paragraph's settings.
  Map<String, Object> config = new HashMap<>(srcParagraph.getConfig());
  Map<String, Object> param = srcParagraph.settings.getParams();
  Map<String, Input> form = srcParagraph.settings.getForms();

  LOGGER.debug("srcParagraph user: {}", srcParagraph.getUser());

  newParagraph.setAuthenticationInfo(subject);
  newParagraph.setConfig(config);
  newParagraph.settings.setParams(param);
  newParagraph.settings.setForms(form);
  newParagraph.setText(srcParagraph.getText());
  newParagraph.setTitle(srcParagraph.getTitle());

  LOGGER.debug("newParagraph user: {}", newParagraph.getUser());

  try {
    // Deep-copy the previous result by round-tripping it through JSON.
    String resultJson = srcParagraph.getReturn().toJson();
    InterpreterResult result = InterpreterResult.fromJson(resultJson);
    newParagraph.setReturn(result, null);
  } catch (Exception e) {
    // 'result' part of Note consists of exception, instead of actual interpreter results
    LOGGER.warn("Paragraph {} has a result with exception. {}", srcParagraph.getId(),
        e.getMessage());
  }

  paragraphs.add(newParagraph);

  fireParagraphCreateEvent(newParagraph);
}

/** Notify all registered listeners that a paragraph was created. */
public void fireParagraphCreateEvent(Paragraph p) {
  for (NoteEventListener listener : noteEventListeners) {
    listener.onParagraphCreate(p);
  }
}

/** Notify all registered listeners that a paragraph was removed. */
public void fireParagraphRemoveEvent(Paragraph p) {
  for (NoteEventListener listener : noteEventListeners) {
    listener.onParagraphRemove(p);
  }
}

/** Notify all registered listeners that a paragraph was updated. */
public void fireParagraphUpdateEvent(Paragraph p) {
  for (NoteEventListener listener : noteEventListeners) {
    listener.onParagraphUpdate(p);
  }
}

/**
 * Create a new paragraph and insert it to the note in given index.
 *
 * @param index index of paragraphs
 */
public Paragraph insertNewParagraph(int index, AuthenticationInfo authenticationInfo) {
  Paragraph paragraph = new Paragraph(this, paragraphJobListener);
  if (null != interpreterSettingManager) {
    // Set the default parameter configuration for the paragraph
    // based on `interpreter-setting.json` config
    Map<String, Object> config =
        interpreterSettingManager.getConfigSetting(defaultInterpreterGroup);
    paragraph.setConfig(config);
  }
  paragraph.setAuthenticationInfo(authenticationInfo);
  // Pre-fill the "%interpreter" magic line based on the neighbouring paragraph.
  setParagraphMagic(paragraph, index);
  insertParagraph(paragraph, index);
  return paragraph;
}

/** Append a paragraph at the end of the note. */
public void addParagraph(Paragraph paragraph) {
  insertParagraph(paragraph, paragraphs.size());
}

private void insertParagraph(Paragraph paragraph, int index) {
  paragraphs.add(index, paragraph);
  fireParagraphCreateEvent(paragraph);
}

/**
 * Remove paragraph by id.
 *
 * @param paragraphId ID of paragraph
 * @return a paragraph that was deleted, or <code>null</code> otherwise
 */
public Paragraph removeParagraph(String user, String paragraphId) {
  // Clean up AngularObjects and interpreter resources tied to the paragraph first,
  // even if the paragraph id turns out not to exist in this note.
  removeAllAngularObjectInParagraph(user, paragraphId);
  interpreterSettingManager.removeResourcesBelongsToParagraph(getId(), paragraphId);
  for (Paragraph p : paragraphs) {
    if (p.getId().equals(paragraphId)) {
      paragraphs.remove(p);
      fireParagraphRemoveEvent(p);
      return p;
    }
  }
  return null;
}

/** Reset a paragraph's result, runtime infos and buffered output. */
public void clearParagraphOutputFields(Paragraph p) {
  p.setReturn(null, null);
  p.cleanRuntimeInfos();
  p.cleanOutputBuffer();
}

/**
 * Clear the per-user copy of a paragraph's output in personalized mode.
 * NOTE(review): getUserParagraphMap().get(user) may return null when no personalized copy
 * exists for this user, which would NPE in clearParagraphOutputFields — confirm callers
 * guarantee the copy exists.
 */
public Paragraph clearPersonalizedParagraphOutput(String paragraphId, String user) {
  for (Paragraph p : paragraphs) {
    if (!p.getId().equals(paragraphId)) {
      continue;
    }
    p = p.getUserParagraphMap().get(user);
    clearParagraphOutputFields(p);
    return p;
  }
  return null;
}

/**
 * Clear paragraph output by id.
*
 * @param paragraphId ID of paragraph
 * @return Paragraph
 */
public Paragraph clearParagraphOutput(String paragraphId) {
  for (Paragraph p : paragraphs) {
    if (!p.getId().equals(paragraphId)) {
      continue;
    }
    clearParagraphOutputFields(p);
    return p;
  }
  return null;
}

/**
 * Clear all paragraph output of note
 */
public void clearAllParagraphOutput() {
  for (Paragraph p : paragraphs) {
    p.setReturn(null, null);
  }
}

/**
 * Move paragraph into the new index (order from 0 ~ n-1).
 *
 * @param paragraphId ID of paragraph
 * @param index new index
 */
public void moveParagraph(String paragraphId, int index) {
  moveParagraph(paragraphId, index, false);
}

/**
 * Move paragraph into the new index (order from 0 ~ n-1).
 *
 * @param paragraphId ID of paragraph
 * @param index new index
 * @param throwWhenIndexIsOutOfBound whether throw IndexOutOfBoundException
 *        when index is out of bound
 */
public void moveParagraph(String paragraphId, int index, boolean throwWhenIndexIsOutOfBound) {
  int oldIndex;
  Paragraph p = null;

  // Validate the target index against the current (pre-removal) size.
  if (index < 0 || index >= paragraphs.size()) {
    if (throwWhenIndexIsOutOfBound) {
      throw new IndexOutOfBoundsException(
          "paragraph size is " + paragraphs.size() + " , index is " + index);
    } else {
      return;
    }
  }

  // Find the paragraph, pull it out, then re-insert it at the target index.
  // NOTE(review): the loop keeps scanning after the removal (no break) — harmless while
  // paragraph ids are unique, but worth confirming.
  for (int i = 0; i < paragraphs.size(); i++) {
    if (paragraphs.get(i).getId().equals(paragraphId)) {
      oldIndex = i;
      if (oldIndex == index) {
        return;
      }
      p = paragraphs.remove(i);
    }
  }

  if (p != null) {
    paragraphs.add(index, p);
  }
}

/** Whether the given paragraph is the last one of the note. */
public boolean isLastParagraph(String paragraphId) {
  if (!paragraphs.isEmpty()) {
    if (paragraphId.equals(paragraphs.get(paragraphs.size() - 1).getId())) {
      return true;
    }
    return false;
  }
  /* Empty note: by convention the queried paragraph counts as "last". */
  return true;
}

public int getParagraphCount() {
  return paragraphs.size();
}

/** Look up a paragraph by id; returns null when the id is unknown. */
public Paragraph getParagraph(String paragraphId) {
  for (Paragraph p : paragraphs) {
    if (p.getId().equals(paragraphId)) {
      return p;
    }
  }
  return null;
}

public Paragraph getParagraph(int index) {
  return paragraphs.get(index);
}

public Paragraph getLastParagraph() {
  return paragraphs.get(paragraphs.size() - 1);
}

/**
 * Seed a new paragraph's text with the "%interpreter" magic line of its neighbour
 * (the previous paragraph, or the first one when inserting at index 0).
 */
private void setParagraphMagic(Paragraph p, int index) {
  if (!paragraphs.isEmpty()) {
    String replName;
    if (index == 0) {
      replName = paragraphs.get(0).getIntpText();
    } else {
      replName = paragraphs.get(index - 1).getIntpText();
    }
    if (p.isValidInterpreter(replName) && StringUtils.isNotEmpty(replName)) {
      p.setText("%" + replName + "\n");
    }
  }
}

/**
 * Run all the paragraphs of this note in different kinds of ways:
 * - blocking/non-blocking
 * - isolated/non-isolated
 *
 * @param authInfo
 * @param blocking
 * @param isolated
 * @throws Exception
 */
public void runAll(AuthenticationInfo authInfo,
                   boolean blocking,
                   boolean isolated,
                   Map<String, Object> params) throws Exception {
  // Reject concurrent runs of the same note.
  if (isRunning()) {
    throw new Exception("Unable to run note:" + id + " because it is still in RUNNING state.");
  }
  setIsolatedMode(isolated);
  setRunning(true);
  setStartTime(DATE_TIME_FORMATTER.format(LocalDateTime.now()));
  if (blocking) {
    try {
      runAllSync(authInfo, isolated, params);
    } finally {
      // Always restore run state, even when a paragraph fails.
      setRunning(false);
      setIsolatedMode(false);
      clearStartTime();
    }
  } else {
    // Non-blocking: submit the same synchronous run to the shared note-job executor.
    ExecutorFactory.singleton().getNoteJobExecutor().submit(() -> {
      try {
        runAllSync(authInfo, isolated, params);
      } catch (Exception e) {
        LOGGER.warn("Fail to run note: {}", id, e);
      } finally {
        setRunning(false);
        setIsolatedMode(false);
        clearStartTime();
      }
    });
  }
}

/**
 * Run all the paragraphs in sync(blocking) way.
*
 * @param authInfo
 * @param isolated
 */
private void runAllSync(AuthenticationInfo authInfo,
                        boolean isolated,
                        Map<String, Object> params) throws Exception {
  try {
    for (Paragraph p : getParagraphs()) {
      if (!p.isEnabled()) {
        continue;
      }
      p.setAuthenticationInfo(authInfo);
      // Temporarily override the paragraph params with the note-run params, restoring
      // the originals in the finally block below.
      Map<String, Object> originalParams = p.settings.getParams();
      try {
        if (params != null && !params.isEmpty()) {
          p.settings.setParams(params);
        }
        Interpreter interpreter = p.getBindedInterpreter();
        if (interpreter != null) {
          // set interpreter property to execution.mode to be note
          // so that it could use the correct scheduler. see ZEPPELIN-4832
          interpreter.setProperty(".execution.mode", "note");
          interpreter.setProperty(".noteId", id);
        }
        // Must run each paragraph in blocking way.
        if (!run(p.getId(), true)) {
          // Stop at the first failing paragraph; later paragraphs are skipped.
          LOGGER.warn("Skip running the remain notes because paragraph {} fails", p.getId());
          return;
        }
      } catch (InterpreterNotFoundException e) {
        p.setInterpreterNotFound(e);
      } finally {
        // reset params to the original value
        p.settings.setParams(originalParams);
      }
    }
  } finally {
    if (isolated) {
      // Isolated mode: tear down the interpreters started for this run.
      LOGGER.info("Releasing interpreters used by this note: {}", id);
      for (InterpreterSetting setting : getUsedInterpreterSettings()) {
        setting.closeInterpreters(getExecutionContext());
        for (Paragraph p : paragraphs) {
          p.setInterpreter(null);
        }
      }
    }
  }
}

/**
 * Run a single paragraph in non-blocking way.
 *
 * @param paragraphId
 * @return
 */
public boolean run(String paragraphId) {
  return run(paragraphId, false);
}

/**
 * Run a single paragraph.
 *
 * @param paragraphId ID of paragraph
 * @param blocking Whether run this paragraph in blocking way
 */
public boolean run(String paragraphId, boolean blocking) {
  return run(paragraphId, null, blocking, null);
}

/**
 * Run a single paragraph. Return true only when paragraph run successfully.
 *
 * @param paragraphId
 * @param blocking
 * @param ctxUser
 * @return
 */
public boolean run(String paragraphId,
                   String interpreterGroupId,
                   boolean blocking,
                   String ctxUser) {
  // NOTE(review): getParagraph may return null for an unknown id, which would NPE below —
  // callers appear to guarantee the paragraph exists; confirm.
  Paragraph p = getParagraph(paragraphId);

  // In personalized mode, execute the user's private copy of the paragraph.
  if (isPersonalizedMode() && ctxUser != null)
    p = p.getUserParagraph(ctxUser);

  p.setListener(this.paragraphJobListener);
  return p.execute(interpreterGroupId, blocking);
}

/**
 * Return true if there is a running or pending paragraph
 */
public boolean haveRunningOrPendingParagraphs() {
  for (Paragraph p : paragraphs) {
    Status status = p.getStatus();
    if (status.isRunning() || status.isPending()) {
      return true;
    }
  }
  return false;
}

/** Whether this note currently lives under the trash folder. */
public boolean isTrash() {
  return this.path.startsWith("/" + NoteManager.TRASH_FOLDER);
}

/** Delegate code-completion at the given cursor to the paragraph's interpreter. */
public List<InterpreterCompletion> completion(String paragraphId,
                                              String buffer,
                                              int cursor,
                                              AuthenticationInfo authInfo) {
  Paragraph p = getParagraph(paragraphId);
  p.setListener(this.paragraphJobListener);
  p.setAuthenticationInfo(authInfo);
  return p.completion(buffer, cursor);
}

/** Live list of this note's paragraphs (CopyOnWriteArrayList; safe to iterate concurrently). */
public List<Paragraph> getParagraphs() {
  return this.paragraphs;
}

// TODO(zjffdu) how does this used ?
  /**
   * Rebuild the note-level snapshot of angular objects from every interpreter group
   * bound to this note for the given user, keyed by interpreter group id.
   *
   * @param user user whose bound interpreter settings are consulted
   */
  private void snapshotAngularObjectRegistry(String user) {
    // Replaces (not merges) any previous snapshot.
    angularObjects = new HashMap<>();
    List<InterpreterSetting> settings = getBindedInterpreterSettings(Arrays.asList(user));
    if (settings == null || settings.isEmpty()) {
      return;
    }
    for (InterpreterSetting setting : settings) {
      InterpreterGroup intpGroup = setting.getInterpreterGroup(getExecutionContext());
      if (intpGroup != null) {
        AngularObjectRegistry registry = intpGroup.getAngularObjectRegistry();
        angularObjects.put(intpGroup.getId(), registry.getAllWithGlobal(id));
      }
    }
  }

  /**
   * Remove every angular object scoped to the given paragraph (and to any application
   * running in it) from all interpreter groups bound to this note.
   *
   * NOTE(review): this also resets the note's angularObjects snapshot map up front —
   * presumably intentional so stale entries don't survive the removal; confirm.
   *
   * @param user        user whose bound interpreter settings are consulted
   * @param paragraphId paragraph whose scoped objects are removed
   */
  private void removeAllAngularObjectInParagraph(String user, String paragraphId) {
    angularObjects = new HashMap<>();
    List<InterpreterSetting> settings = getBindedInterpreterSettings(Arrays.asList(user));
    if (settings == null || settings.isEmpty()) {
      return;
    }
    for (InterpreterSetting setting : settings) {
      if (setting.getInterpreterGroup(getExecutionContext()) == null) {
        continue;
      }
      InterpreterGroup intpGroup = setting.getInterpreterGroup(getExecutionContext());
      AngularObjectRegistry registry = intpGroup.getAngularObjectRegistry();
      if (registry instanceof RemoteAngularObjectRegistry) {
        // Remote registry: removal must also notify the remote interpreter process.
        // remove paragraph scope object
        ((RemoteAngularObjectRegistry) registry).removeAllAndNotifyRemoteProcess(id, paragraphId);

        // remove app scope object
        List<ApplicationState> appStates = getParagraph(paragraphId).getAllApplicationStates();
        if (appStates != null) {
          for (ApplicationState app : appStates) {
            ((RemoteAngularObjectRegistry) registry)
                .removeAllAndNotifyRemoteProcess(id, app.getId());
          }
        }
      } else {
        // Local registry: plain removal, no remote notification needed.
        registry.removeAll(id, paragraphId);

        // remove app scope object
        List<ApplicationState> appStates = getParagraph(paragraphId).getAllApplicationStates();
        if (appStates != null) {
          for (ApplicationState app : appStates) {
            registry.removeAll(id, app.getId());
          }
        }
      }
    }
  }

  /**
   * Collect the interpreter settings bound to this note for the given user/roles:
   * the note's default setting first, then settings sharing the default's group,
   * then the setting of every paragraph's bound interpreter.
   *
   * NOTE(review): the default setting is added without an isUserAuthorized check,
   * unlike the other two sources — presumably intentional (default is always
   * visible); confirm.
   *
   * @param userAndRoles user and roles used for the authorization filter
   * @return de-duplicated settings; first element is the default interpreter setting
   */
  public List<InterpreterSetting> getBindedInterpreterSettings(List<String> userAndRoles) {
    // use LinkedHashSet because order matters, the first one represent the default interpreter setting.
    Set<InterpreterSetting> settings = new LinkedHashSet<>();
    // add the default interpreter group
    InterpreterSetting defaultIntpSetting =
        interpreterSettingManager.getByName(getDefaultInterpreterGroup());
    if (defaultIntpSetting != null) {
      settings.add(defaultIntpSetting);
    }
    // add the interpreter setting with the same group of default interpreter group
    if (defaultIntpSetting != null) {
      for (InterpreterSetting intpSetting : interpreterSettingManager.get()) {
        if (intpSetting.getGroup().equals(defaultIntpSetting.getGroup())) {
          if (intpSetting.isUserAuthorized(userAndRoles)) {
            settings.add(intpSetting);
          }
        }
      }
    }

    // add interpreter group used by each paragraph
    for (Paragraph p : getParagraphs()) {
      try {
        Interpreter intp = p.getBindedInterpreter();
        InterpreterSetting interpreterSetting = (
            (ManagedInterpreterGroup) intp.getInterpreterGroup()).getInterpreterSetting();
        if (interpreterSetting.isUserAuthorized(userAndRoles)) {
          settings.add(interpreterSetting);
        }
      } catch (InterpreterNotFoundException e) {
        // ignore this — a paragraph without a bound interpreter simply contributes
        // no setting.
      }
    }
    return new ArrayList<>(settings);
  }

  /**
   * Get the InterpreterSettings currently used by the paragraphs of this note
   * (only paragraphs whose interpreter is already bound contribute).
   *
   * @return de-duplicated list of settings in use
   */
  public List<InterpreterSetting> getUsedInterpreterSettings() {
    Set<InterpreterSetting> settings = new HashSet<>();
    for (Paragraph p : getParagraphs()) {
      Interpreter intp = p.getInterpreter();
      if (intp != null) {
        settings.add((
            (ManagedInterpreterGroup) intp.getInterpreterGroup()).getInterpreterSetting());
      }
    }
    return new ArrayList<>(settings);
  }

  /**
   * Return new note for specific user.
this inserts and replaces user paragraph which doesn't * exists in original paragraph * * @param user specific user * @return new Note for the user */ public Note getUserNote(String user) { Note newNote = new Note(); newNote.name = getName(); newNote.id = getId(); newNote.setConfig(getConfig()); newNote.angularObjects = getAngularObjects(); newNote.setZeppelinConfiguration(zConf); newNote.setNoteParser(noteParser); Paragraph newParagraph; for (Paragraph p : paragraphs) { newParagraph = p.getUserParagraph(user); if (null == newParagraph) { newParagraph = p.cloneParagraphForUser(user); } newNote.paragraphs.add(newParagraph); } return newNote; } public Map<String, Object> getConfig() { if (config == null) { config = new HashMap<>(); } return config; } public void setConfig(Map<String, Object> config) { this.config = config; } public Map<String, Object> getInfo() { if (info == null) { info = new HashMap<>(); } return info; } public void setInfo(Map<String, Object> info) { this.info = info; } public void setRunning(boolean runStatus) { Map<String, Object> infoMap = getInfo(); boolean oldStatus = (boolean) infoMap.getOrDefault("isRunning", false); if (oldStatus != runStatus) { infoMap.put("isRunning", runStatus); if (paragraphJobListener != null) { paragraphJobListener.noteRunningStatusChange(this.id, runStatus); } } } public void setIsolatedMode(boolean isolatedMode) { info.put("inIsolatedMode", isolatedMode); } public boolean isIsolatedMode() { if (info == null) { return false; } else { return Boolean.parseBoolean( info.getOrDefault("inIsolatedMode", "false").toString()); } } public void setStartTime(String startTime) { info.put("startTime", startTime); } public String getStartTime() { if (info == null) { return null; } else { return info.getOrDefault("startTime", "").toString(); } } public void clearStartTime() { info.remove("startTime"); } /** * Is note running * @return */ public boolean isRunning() { return (boolean) getInfo().getOrDefault("isRunning", false); } 
  /** Prefer the note path as its display form; fall back to "/name". */
  @Override
  public String toString() {
    if (this.path != null) {
      return this.path;
    } else {
      return "/" + this.name;
    }
  }

  @Override
  public String toJson() {
    return noteParser.toJson(this);
  }

  /**
   * Normalize paragraph state after this note is (re)loaded: re-parse text, re-attach
   * the note reference, reset auth, abort stale PENDING/RUNNING jobs (RUNNING only
   * when recovery is disabled), and unload non-errored applications.
   */
  public void postProcessParagraphs() {
    for (Paragraph p : paragraphs) {
      p.parseText();
      p.setNote(this);
      p.setAuthenticationInfo(AuthenticationInfo.ANONYMOUS);

      // A PENDING job from a previous server run can never be scheduled again.
      if (p.getStatus() == Status.PENDING) {
        p.setStatus(Status.ABORT);
      }

      // A RUNNING job is only kept alive when recovery is enabled.
      if (p.getStatus() == Status.RUNNING && !zConf.isRecoveryEnabled()) {
        p.setStatus(Status.ABORT);
      }

      List<ApplicationState> appStates = p.getAllApplicationStates();
      if (appStates != null) {
        for (ApplicationState app : appStates) {
          if (app.getStatus() != ApplicationState.Status.ERROR) {
            app.setStatus(ApplicationState.Status.UNLOADED);
          }
        }
      }
    }
  }

  // Equality is based on paragraphs, id, angularObjects, config and info —
  // deliberately NOT on path (see TODO below). hashCode matches this choice.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    Note note = (Note) o;

    if (paragraphs != null ? !paragraphs.equals(note.paragraphs) : note.paragraphs != null) {
      return false;
    }
    //TODO(zjffdu) exclude path because FolderView.index use Note as key and consider different path
    //as same note
    // if (path != null ? !path.equals(note.path) : note.path != null) return false;
    if (id != null ? !id.equals(note.id) : note.id != null) {
      return false;
    }
    if (angularObjects != null
        ? !angularObjects.equals(note.angularObjects)
        : note.angularObjects != null) {
      return false;
    }
    if (config != null ? !config.equals(note.config) : note.config != null) {
      return false;
    }
    return info != null ? info.equals(note.info) : note.info == null;
  }

  @Override
  public int hashCode() {
    int result = paragraphs != null ? paragraphs.hashCode() : 0;
    // path is excluded, mirroring equals():
    // result = 31 * result + (path != null ? path.hashCode() : 0);
    result = 31 * result + (id != null ? id.hashCode() : 0);
    result = 31 * result + (angularObjects != null ? angularObjects.hashCode() : 0);
    result = 31 * result + (config != null ? config.hashCode() : 0);
    result = 31 * result + (info != null ? info.hashCode() : 0);
    return result;
  }

  public void setNoteEventListeners(List<NoteEventListener> noteEventListeners) {
    this.noteEventListeners = noteEventListeners;
  }

  public ReentrantReadWriteLock getLock() {
    return lock;
  }

  public void setRemoved(boolean removed) {
    this.removed = removed;
  }

  public boolean isRemoved() {
    return removed;
  }

  /**
   * Build the ExecutionContext describing how this note's interpreters should be
   * resolved (note id, default group, isolation mode and start time).
   */
  public ExecutionContext getExecutionContext() {
    ExecutionContext executionContext = new ExecutionContext();
    executionContext.setNoteId(id);
    executionContext.setDefaultInterpreterGroup(defaultInterpreterGroup);
    executionContext.setInIsolatedMode(isIsolatedMode());
    executionContext.setStartTime(getStartTime());
    return executionContext;
  }

  public NoteParser getNoteParser() {
    return noteParser;
  }

  public void setZeppelinConfiguration(ZeppelinConfiguration zConf) {
    this.zConf = zConf;
  }

  public void setNoteParser(NoteParser noteParser) {
    this.noteParser = noteParser;
  }
}
googleapis/google-cloud-java
35,011
java-containeranalysis/proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/pkg/Installation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/containeranalysis/v1beta1/package/package.proto // Protobuf Java Version: 3.25.8 package io.grafeas.v1beta1.pkg; /** * * * <pre> * This represents how a particular software package may be installed on a * system. * </pre> * * Protobuf type {@code grafeas.v1beta1.package.Installation} */ public final class Installation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grafeas.v1beta1.package.Installation) InstallationOrBuilder { private static final long serialVersionUID = 0L; // Use Installation.newBuilder() to construct. 
private Installation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Installation() { name_ = ""; location_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Installation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1beta1.pkg.PackageOuterClass .internal_static_grafeas_v1beta1_package_Installation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1beta1.pkg.PackageOuterClass .internal_static_grafeas_v1beta1_package_Installation_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1beta1.pkg.Installation.class, io.grafeas.v1beta1.pkg.Installation.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Output only. The name of the installed package. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Output only. The name of the installed package. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LOCATION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<io.grafeas.v1beta1.pkg.Location> location_; /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ @java.lang.Override public java.util.List<io.grafeas.v1beta1.pkg.Location> getLocationList() { return location_; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ @java.lang.Override public java.util.List<? extends io.grafeas.v1beta1.pkg.LocationOrBuilder> getLocationOrBuilderList() { return location_; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ @java.lang.Override public int getLocationCount() { return location_.size(); } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ @java.lang.Override public io.grafeas.v1beta1.pkg.Location getLocation(int index) { return location_.get(index); } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. 
* </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ @java.lang.Override public io.grafeas.v1beta1.pkg.LocationOrBuilder getLocationOrBuilder(int index) { return location_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } for (int i = 0; i < location_.size(); i++) { output.writeMessage(2, location_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } for (int i = 0; i < location_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, location_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grafeas.v1beta1.pkg.Installation)) { return super.equals(obj); } io.grafeas.v1beta1.pkg.Installation other = (io.grafeas.v1beta1.pkg.Installation) obj; if (!getName().equals(other.getName())) return false; if (!getLocationList().equals(other.getLocationList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); 
hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (getLocationCount() > 0) { hash = (37 * hash) + LOCATION_FIELD_NUMBER; hash = (53 * hash) + getLocationList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static io.grafeas.v1beta1.pkg.Installation parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.pkg.Installation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.pkg.Installation parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.pkg.Installation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.pkg.Installation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.pkg.Installation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.pkg.Installation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.pkg.Installation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.pkg.Installation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static io.grafeas.v1beta1.pkg.Installation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.pkg.Installation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.pkg.Installation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grafeas.v1beta1.pkg.Installation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * This represents how a particular software package may be installed on a * system. 
* </pre> * * Protobuf type {@code grafeas.v1beta1.package.Installation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grafeas.v1beta1.package.Installation) io.grafeas.v1beta1.pkg.InstallationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1beta1.pkg.PackageOuterClass .internal_static_grafeas_v1beta1_package_Installation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1beta1.pkg.PackageOuterClass .internal_static_grafeas_v1beta1_package_Installation_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1beta1.pkg.Installation.class, io.grafeas.v1beta1.pkg.Installation.Builder.class); } // Construct using io.grafeas.v1beta1.pkg.Installation.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; if (locationBuilder_ == null) { location_ = java.util.Collections.emptyList(); } else { location_ = null; locationBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grafeas.v1beta1.pkg.PackageOuterClass .internal_static_grafeas_v1beta1_package_Installation_descriptor; } @java.lang.Override public io.grafeas.v1beta1.pkg.Installation getDefaultInstanceForType() { return io.grafeas.v1beta1.pkg.Installation.getDefaultInstance(); } @java.lang.Override public io.grafeas.v1beta1.pkg.Installation build() { io.grafeas.v1beta1.pkg.Installation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
io.grafeas.v1beta1.pkg.Installation buildPartial() { io.grafeas.v1beta1.pkg.Installation result = new io.grafeas.v1beta1.pkg.Installation(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(io.grafeas.v1beta1.pkg.Installation result) { if (locationBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { location_ = java.util.Collections.unmodifiableList(location_); bitField0_ = (bitField0_ & ~0x00000002); } result.location_ = location_; } else { result.location_ = locationBuilder_.build(); } } private void buildPartial0(io.grafeas.v1beta1.pkg.Installation result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grafeas.v1beta1.pkg.Installation) { return mergeFrom((io.grafeas.v1beta1.pkg.Installation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grafeas.v1beta1.pkg.Installation other) { 
if (other == io.grafeas.v1beta1.pkg.Installation.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (locationBuilder_ == null) { if (!other.location_.isEmpty()) { if (location_.isEmpty()) { location_ = other.location_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureLocationIsMutable(); location_.addAll(other.location_); } onChanged(); } } else { if (!other.location_.isEmpty()) { if (locationBuilder_.isEmpty()) { locationBuilder_.dispose(); locationBuilder_ = null; location_ = other.location_; bitField0_ = (bitField0_ & ~0x00000002); locationBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getLocationFieldBuilder() : null; } else { locationBuilder_.addAllMessages(other.location_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { io.grafeas.v1beta1.pkg.Location m = input.readMessage(io.grafeas.v1beta1.pkg.Location.parser(), extensionRegistry); if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.add(m); } else { locationBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // 
finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Output only. The name of the installed package. * </pre> * * <code>string name = 1;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The name of the installed package. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The name of the installed package. * </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. The name of the installed package. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Output only. The name of the installed package. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<io.grafeas.v1beta1.pkg.Location> location_ = java.util.Collections.emptyList(); private void ensureLocationIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { location_ = new java.util.ArrayList<io.grafeas.v1beta1.pkg.Location>(location_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< io.grafeas.v1beta1.pkg.Location, io.grafeas.v1beta1.pkg.Location.Builder, io.grafeas.v1beta1.pkg.LocationOrBuilder> locationBuilder_; /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public java.util.List<io.grafeas.v1beta1.pkg.Location> getLocationList() { if (locationBuilder_ == null) { return java.util.Collections.unmodifiableList(location_); } else { return locationBuilder_.getMessageList(); } } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public int getLocationCount() { if (locationBuilder_ == null) { return location_.size(); } else { return locationBuilder_.getCount(); } } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public io.grafeas.v1beta1.pkg.Location getLocation(int index) { if (locationBuilder_ == null) { return location_.get(index); } else { return locationBuilder_.getMessage(index); } } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. 
* </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder setLocation(int index, io.grafeas.v1beta1.pkg.Location value) { if (locationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocationIsMutable(); location_.set(index, value); onChanged(); } else { locationBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder setLocation(int index, io.grafeas.v1beta1.pkg.Location.Builder builderForValue) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.set(index, builderForValue.build()); onChanged(); } else { locationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder addLocation(io.grafeas.v1beta1.pkg.Location value) { if (locationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocationIsMutable(); location_.add(value); onChanged(); } else { locationBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder addLocation(int index, io.grafeas.v1beta1.pkg.Location value) { if (locationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocationIsMutable(); location_.add(index, value); onChanged(); } else { locationBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. 
* </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder addLocation(io.grafeas.v1beta1.pkg.Location.Builder builderForValue) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.add(builderForValue.build()); onChanged(); } else { locationBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder addLocation(int index, io.grafeas.v1beta1.pkg.Location.Builder builderForValue) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.add(index, builderForValue.build()); onChanged(); } else { locationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder addAllLocation( java.lang.Iterable<? extends io.grafeas.v1beta1.pkg.Location> values) { if (locationBuilder_ == null) { ensureLocationIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, location_); onChanged(); } else { locationBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder clearLocation() { if (locationBuilder_ == null) { location_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { locationBuilder_.clear(); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. 
* </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public Builder removeLocation(int index) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.remove(index); onChanged(); } else { locationBuilder_.remove(index); } return this; } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public io.grafeas.v1beta1.pkg.Location.Builder getLocationBuilder(int index) { return getLocationFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public io.grafeas.v1beta1.pkg.LocationOrBuilder getLocationOrBuilder(int index) { if (locationBuilder_ == null) { return location_.get(index); } else { return locationBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public java.util.List<? extends io.grafeas.v1beta1.pkg.LocationOrBuilder> getLocationOrBuilderList() { if (locationBuilder_ != null) { return locationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(location_); } } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public io.grafeas.v1beta1.pkg.Location.Builder addLocationBuilder() { return getLocationFieldBuilder() .addBuilder(io.grafeas.v1beta1.pkg.Location.getDefaultInstance()); } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. 
* </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public io.grafeas.v1beta1.pkg.Location.Builder addLocationBuilder(int index) { return getLocationFieldBuilder() .addBuilder(index, io.grafeas.v1beta1.pkg.Location.getDefaultInstance()); } /** * * * <pre> * Required. All of the places within the filesystem versions of this package * have been found. * </pre> * * <code>repeated .grafeas.v1beta1.package.Location location = 2;</code> */ public java.util.List<io.grafeas.v1beta1.pkg.Location.Builder> getLocationBuilderList() { return getLocationFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< io.grafeas.v1beta1.pkg.Location, io.grafeas.v1beta1.pkg.Location.Builder, io.grafeas.v1beta1.pkg.LocationOrBuilder> getLocationFieldBuilder() { if (locationBuilder_ == null) { locationBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< io.grafeas.v1beta1.pkg.Location, io.grafeas.v1beta1.pkg.Location.Builder, io.grafeas.v1beta1.pkg.LocationOrBuilder>( location_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); location_ = null; } return locationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:grafeas.v1beta1.package.Installation) } // @@protoc_insertion_point(class_scope:grafeas.v1beta1.package.Installation) private static final io.grafeas.v1beta1.pkg.Installation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grafeas.v1beta1.pkg.Installation(); } public static io.grafeas.v1beta1.pkg.Installation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Installation> PARSER = new 
com.google.protobuf.AbstractParser<Installation>() { @java.lang.Override public Installation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Installation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Installation> getParserForType() { return PARSER; } @java.lang.Override public io.grafeas.v1beta1.pkg.Installation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): protoc-generated protobuf code (see the @@protoc_insertion_point markers above); do not hand-edit — regenerate from the .proto definition instead.
apache/hive
35,059
ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java
// NOTE(review): the tests in this class mutate static UserGroupInformation login-user state and rely on @FixMethodOrder(NAME_ASCENDING) ordering; several catch blocks swallow Exception, so setup failures can pass silently — TODO confirm each test asserts its expected outcome.
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; import com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.ColumnType; import org.apache.hadoop.hive.metastore.*; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; import org.apache.hadoop.hive.metastore.client.builder.*; import org.apache.hadoop.hive.metastore.events.*; import org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.TableFilterContext; import org.apache.hadoop.security.UserGroupInformation; import org.apache.thrift.TException; import org.junit.Before; import org.junit.FixMethodOrder; import org.junit.Test; import org.junit.runners.MethodSorters; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.io.File; import java.util.stream.Collectors; import java.util.Arrays; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /* Test whether HiveAuthorizer for MetaStore operation is trigger and HiveMetaStoreAuthzInfo is created by HiveMetaStoreAuthorizer */ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class TestHiveMetaStoreAuthorizer { private static final String dbName = "test"; private static final String tblName = "tmptbl"; private static final String viewName = "tmpview"; private static final String roleName = "tmpRole"; private static final 
String catalogName = "testCatalog"; private static final String dcName = "testDC"; private static final String unAuthorizedUser = "bob"; static final String authorizedUser = "sam"; private static final String superUser = "hive"; private static final String default_db = "default"; private static final String metaConfVal = ""; private static final String TEST_DATA_DIR = new File("file:///testdata").getPath(); private static final List<String> PARTCOL_SCHEMA = Lists.newArrayList("yyyy", "mm", "dd"); private RawStore rawStore; private Configuration conf; private HMSHandler hmsHandler; private Warehouse wh; static HiveAuthorizer mockHiveAuthorizer; static final List<String> allowedUsers = Arrays.asList("sam", "rob"); @Before public void setUp() throws Exception { conf = MetastoreConf.newMetastoreConf(); MetastoreConf.setBoolVar(conf, ConfVars.HIVE_IN_TEST, true); MetastoreConf.setBoolVar(conf, ConfVars.HIVE_TXN_STATS_ENABLED, true); MetastoreConf.setBoolVar(conf, ConfVars.AGGREGATE_STATS_CACHE_ENABLED, false); MetastoreConf.setVar(conf, ConfVars.PARTITION_NAME_WHITELIST_PATTERN, metaConfVal); MetastoreConf.setLongVar(conf, ConfVars.THRIFT_CONNECTION_RETRIES, 3); MetastoreConf.setBoolVar(conf, ConfVars.HIVE_SUPPORT_CONCURRENCY, false); MetastoreConf.setVar(conf, ConfVars.HIVE_AUTHORIZATION_MANAGER, MockHiveAuthorizerFactory.class.getName()); MetastoreConf.setVar(conf, ConfVars.PRE_EVENT_LISTENERS, HiveMetaStoreAuthorizer.class.getName()); MetastoreConf.setVar(conf, ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER, HadoopDefaultMetastoreAuthenticator.class.getName()); MetastoreConf.setVar(conf, ConfVars.FILTER_HOOK, MockMetaStoreOwnerFilterHook.class.getName()); conf.set("hadoop.proxyuser.hive.groups", "*"); conf.set("hadoop.proxyuser.hive.hosts", "*"); conf.set("hadoop.proxyuser.hive.users", "*"); MetaStoreTestUtils.setConfForStandloneMode(conf); wh = Mockito.spy(new Warehouse(conf)); hmsHandler = new HMSHandler("test", conf); hmsHandler.init(wh); rawStore = new 
ObjectStore(); rawStore.setConf(hmsHandler.getConf()); // Create the 'hive' catalog with new warehouse directory HMSHandler.createDefaultCatalog(rawStore, new Warehouse(conf)); try { DropDataConnectorRequest dropDcReq = new DropDataConnectorRequest(dcName); dropDcReq.setIfNotExists(true); dropDcReq.setCheckReferences(true); hmsHandler.drop_dataconnector_req(dropDcReq); hmsHandler.drop_table("default", tblName, true); hmsHandler.drop_database(dbName, true, false); hmsHandler.drop_catalog(new DropCatalogRequest(catalogName)); FileUtils.deleteDirectory(new File(TEST_DATA_DIR)); } catch (Exception e) { // NoSuchObjectException will be ignored if the step objects are not there } mockHiveAuthorizer = Mockito.mock(HiveAuthorizer.class); configureMockAuthorizer(); } /** * Configures the mock authorizer to check permissions based on username */ private static void configureMockAuthorizer() throws HiveAuthzPluginException, HiveAccessControlException { doAnswer(invocation -> { HiveOperationType opType = invocation.getArgument(0); String user; try { user = UserGroupInformation.getLoginUser().getShortUserName(); } catch (Exception e) { throw new HiveAuthzPluginException("Unable to get UserGroupInformation"); } if (!allowedUsers.contains(user) && !user.equals(superUser)) { throw new HiveAuthzPluginException("Operation type " + opType + " not allowed for user:" + user); } return null; }).when(mockHiveAuthorizer).checkPrivileges(any(HiveOperationType.class), any(), any(), any(HiveAuthzContext.class)); } /** * Factory class that provides MockHiveAuthorizer instance */ public static class MockHiveAuthorizerFactory implements HiveAuthorizerFactory { @Override public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf, HiveAuthenticationProvider hiveAuthenticator, HiveAuthzSessionContext ctx) { return mockHiveAuthorizer; } } /** * Captures and returns the privilege objects passed to the authorizer */ private 
Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> getHivePrivilegeObjectsFromLastCall() throws HiveAuthzPluginException, HiveAccessControlException { @SuppressWarnings("unchecked") Class<List<HivePrivilegeObject>> class_listPrivObjects = (Class) List.class; ArgumentCaptor<List<HivePrivilegeObject>> inputsCapturer = ArgumentCaptor .forClass(class_listPrivObjects); ArgumentCaptor<List<HivePrivilegeObject>> outputsCapturer = ArgumentCaptor .forClass(class_listPrivObjects); verify(mockHiveAuthorizer).checkPrivileges(any(HiveOperationType.class), inputsCapturer.capture(), outputsCapturer.capture(), any(HiveAuthzContext.class)); return new ImmutablePair<>(inputsCapturer.getValue(), outputsCapturer.getValue()); } @Test public void testA_CreateDatabase_unAuthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); try { Database db = new DatabaseBuilder() .setName(dbName) .build(conf); hmsHandler.create_database(db); } catch (Exception e) { String err = e.getMessage(); String expected = "Operation type " + HiveOperationType.CREATEDATABASE + " not allowed for user:" + unAuthorizedUser; assertEquals(expected, err); } } @Test public void testB_CreateTable_unAuthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); try { Table table = new TableBuilder() .setTableName(tblName) .addCol("name", ColumnType.STRING_TYPE_NAME) .setOwner(unAuthorizedUser) .build(conf); hmsHandler.create_table(table); } catch (Exception e) { String err = e.getMessage(); String expected = "Operation type " + HiveOperationType.CREATETABLE + " not allowed for user:" + unAuthorizedUser; assertEquals(expected, err); } } @Test public void testC_CreateView_anyUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Table viewObj = new TableBuilder() .setTableName(viewName) 
// NOTE(review): testC's catch below ignores Exception, so if create_table throws, the containsKey("Authorized") assertions never run and the test passes vacuously — TODO fail() on unexpected exceptions.
.setType(TableType.VIRTUAL_VIEW.name()) .addCol("name", ColumnType.STRING_TYPE_NAME) .setOwner(authorizedUser) .build(conf); hmsHandler.create_table(viewObj); Map<String, String> params = viewObj.getParameters(); assertTrue(params.containsKey("Authorized")); assertTrue("false".equalsIgnoreCase(params.get("Authorized"))); } catch (Exception e) { // no Exceptions for user same as normal user is now allowed CREATE_VIEW operation } } @Test public void testC2_AlterView_anyUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Table viewObj = new TableBuilder() .setTableName(viewName) .setType(TableType.VIRTUAL_VIEW.name()) .addCol("name", ColumnType.STRING_TYPE_NAME) .setOwner(authorizedUser) .build(conf); hmsHandler.create_table(viewObj); viewObj = new TableBuilder() .setTableName(viewName) .setType(TableType.VIRTUAL_VIEW.name()) .addCol("dep", ColumnType.STRING_TYPE_NAME) .setOwner(authorizedUser) .build(conf); hmsHandler.alter_table("default", viewName, viewObj); Map<String, String> params = viewObj.getParameters(); assertTrue(params.containsKey("Authorized")); assertTrue("false".equalsIgnoreCase(params.get("Authorized"))); } catch (Exception e) { // no Exceptions for user same as normal user is now allowed Alter_VIEW operation } } @Test public void testD_CreateView_SuperUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(superUser)); try { Table viewObj = new TableBuilder() .setTableName(viewName) .setType(TableType.VIRTUAL_VIEW.name()) .addCol("name", ColumnType.STRING_TYPE_NAME) .build(conf); hmsHandler.create_table(viewObj); } catch (Exception e) { // no Exceptions for superuser as hive is allowed CREATE_VIEW operation } } @Test public void testE_CreateRole__anyUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Role role = new RoleBuilder() .setRoleName(roleName) 
.setOwnerName(authorizedUser) .build(); hmsHandler.create_role(role); } catch (Exception e) { String err = e.getMessage(); String expected = "Operation type " + PreEventContext.PreEventType.AUTHORIZATION_API_CALL.name() + " not allowed for user:" + authorizedUser; assertEquals(expected, err); } } @Test public void testF_CreateCatalog_anyUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Catalog catalog = new CatalogBuilder() .setName(catalogName) .setLocation(TEST_DATA_DIR) .build(); hmsHandler.create_catalog(new CreateCatalogRequest(catalog)); } catch (Exception e) { String err = e.getMessage(); String expected = "Operation type " + PreEventContext.PreEventType.CREATE_CATALOG.name() + " not allowed for user:" + authorizedUser; assertEquals(expected, err); } } @Test public void testG_CreateCatalog_SuperUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(superUser)); try { Catalog catalog = new CatalogBuilder() .setName(catalogName) .setLocation(TEST_DATA_DIR) .build(); hmsHandler.create_catalog(new CreateCatalogRequest(catalog)); } catch (Exception e) { // no Exceptions for superuser as hive is allowed CREATE CATALOG operation } } @Test public void testH_CreateDatabase_authorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Database db = new DatabaseBuilder() .setName(dbName) .setOwnerName(authorizedUser) .setOwnerType(PrincipalType.USER) .build(conf); hmsHandler.create_database(db); // Verify the mock was called with correct ownership info Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectsFromLastCall(); List<HivePrivilegeObject> outputs = io.getRight(); assertEquals("Should have one output for create database", 1, outputs.size()); HivePrivilegeObject dbObj = outputs.get(0); assertEquals("Output object should be a database", 
HivePrivilegeObject.HivePrivilegeObjectType.DATABASE, dbObj.getType()); } catch (Exception e) { fail("Authorized user should be allowed to create database: " + e.getMessage()); } } @Test public void testI_CreateTable_authorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Table table = new TableBuilder() .setTableName(tblName) .addCol("name", ColumnType.STRING_TYPE_NAME) .setOwner(authorizedUser) .build(conf); hmsHandler.create_table(table); Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectsFromLastCall(); List<HivePrivilegeObject> outputs = io.getRight(); List<HivePrivilegeObject> tableOutputs = outputs.stream() .filter(o -> o.getType() == HivePrivilegeObject.HivePrivilegeObjectType.TABLE_OR_VIEW) .collect(Collectors.toList()); assertEquals("Should have exactly one table output for create table", 1, tableOutputs.size()); HivePrivilegeObject tableObj = tableOutputs.get(0); assertEquals("Output object should be a table", HivePrivilegeObject.HivePrivilegeObjectType.TABLE_OR_VIEW, tableObj.getType()); } catch (Exception e) { fail("Authorized user should be allowed to create table: " + e.getMessage()); } } @Test public void testJ_AlterTable_AuthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Table table = new TableBuilder() .setTableName(tblName) .addCol("name", ColumnType.STRING_TYPE_NAME) .setOwner(authorizedUser) .build(conf); hmsHandler.create_table(table); Table alteredTable = new TableBuilder() .addCol("dep", ColumnType.STRING_TYPE_NAME) .build(conf); hmsHandler.alter_table("default", tblName, alteredTable); } catch (Exception e) { // No Exception for create table for authorized user } } @Test public void testK_DropTable_authorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { 
// NOTE(review): this drop uses dbName ("test") while testI created tblName without setDbName (i.e. in "default"); the catch below hides any mismatch — TODO verify the intended database.
hmsHandler.drop_table(dbName, tblName, true); } catch (Exception e) { // No Exception for create table for authorized user } } @Test public void testL_DropDatabase_authorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { hmsHandler.drop_database(dbName, true, true); } catch (Exception e) { // No Exception for dropDatabase for authorized user } } @Test public void testM_DropCatalog_SuperUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(superUser)); try { hmsHandler.drop_catalog(new DropCatalogRequest(catalogName)); } catch (Exception e) { // no Exceptions for superuser as hive is allowed CREATE CATALOG operation } } @Test public void testNShowDatabaseAuthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { hmsHandler.get_all_databases(); } catch (Exception e) { // no Exceptions for show database as authorized user. } } @Test public void testOShowDatabaseUnauthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); try { hmsHandler.get_all_databases(); } catch (Exception e) { String err = e.getMessage(); if (StringUtils.isNotEmpty(err)) { assert (true); } } } @Test public void testPShowTablesAuthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { hmsHandler.get_all_tables("default"); } catch (Exception e) { // no Exceptions for show tables as authorized user. 
// NOTE(review): the bare "assert (true);" statements in testO/testQ can never fail (and Java assertions are disabled by default) — they add no verification; TODO assert on the expected error message instead.
} } @Test public void testQShowTablesUnauthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); try { hmsHandler.get_all_tables("default"); } catch (Exception e) { String err = e.getMessage(); if (StringUtils.isNotEmpty(err)) { assert (true); } } } @Test public void testGetDatabaseObjects_UnauthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); try { Database db = new DatabaseBuilder() .setName(dbName) .build(conf); hmsHandler.create_database(db); GetDatabaseObjectsRequest request = new GetDatabaseObjectsRequest(); request.setCatalogName("hive"); hmsHandler.get_databases_req(request); } catch (Exception e) { String err = e.getMessage(); assertTrue("Exception message should contain operation type", err.contains("Operation type") && err.contains("not allowed for user:" + unAuthorizedUser)); } finally { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(superUser)); try { hmsHandler.drop_database(dbName, true, false); } catch (Exception e) { // Ignore cleanup errors } } } @Test public void testGetDatabaseObjects_AuthorizedUser() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { Database db = new DatabaseBuilder() .setName(dbName) .setOwnerName(authorizedUser) .build(conf); hmsHandler.create_database(db); GetDatabaseObjectsRequest request = new GetDatabaseObjectsRequest(); request.setCatalogName("hive"); GetDatabaseObjectsResponse response = hmsHandler.get_databases_req(request); assertNotNull("Response should not be null", response); assertNotNull("Databases list should not be null", response.getDatabases()); assertTrue("Should find the created database", response.getDatabases().stream().anyMatch(d -> d.getName().equals(dbName))); } finally { try { hmsHandler.drop_database(dbName, true, false); } catch (Exception e) { // Ignore cleanup errors 
} } } @Test public void testTableFilterContextWithOwnership() throws Exception { List<TableMeta> tableMetas = new ArrayList<>(); TableMeta ownerTableMeta = new TableMeta(); ownerTableMeta.setCatName("hive"); ownerTableMeta.setDbName(default_db); ownerTableMeta.setTableName("owner_table"); ownerTableMeta.setOwnerName(authorizedUser); ownerTableMeta.setOwnerType(org.apache.hadoop.hive.metastore.api.PrincipalType.USER); tableMetas.add(ownerTableMeta); TableMeta otherTableMeta = new TableMeta(); otherTableMeta.setCatName("hive"); otherTableMeta.setDbName(default_db); otherTableMeta.setTableName("other_table"); otherTableMeta.setOwnerName(unAuthorizedUser); otherTableMeta.setOwnerType(org.apache.hadoop.hive.metastore.api.PrincipalType.USER); tableMetas.add(otherTableMeta); TableFilterContext filterContext = TableFilterContext.createFromTableMetas(default_db, tableMetas); List<Table> tables = filterContext.getTables(); assertEquals("Should have two tables in context", 2, tables.size()); boolean foundOwnerTable = false; boolean foundOtherTable = false; for (Table table : tables) { if (table.getTableName().equals("owner_table")) { foundOwnerTable = true; assertEquals("owner_table should have authorized user as owner", authorizedUser, table.getOwner()); } else if (table.getTableName().equals("other_table")) { foundOtherTable = true; assertEquals("other_table should have unauthorized user as owner", unAuthorizedUser, table.getOwner()); } } assertTrue("owner_table not found in tables", foundOwnerTable); assertTrue("other_table not found in tables", foundOtherTable); HiveMetaStoreAuthzInfo authzInfo = filterContext.getAuthzContext(); List<HivePrivilegeObject> privObjects = authzInfo.getInputHObjs(); assertEquals("Should have two privilege objects", 2, privObjects.size()); foundOwnerTable = false; foundOtherTable = false; for (HivePrivilegeObject obj : privObjects) { if (obj.getObjectName().equals("owner_table")) { foundOwnerTable = true; assertEquals("owner_table privilege 
object should have authorized user as owner", authorizedUser, obj.getOwnerName()); } else if (obj.getObjectName().equals("other_table")) { foundOtherTable = true; assertEquals("other_table privilege object should have unauthorized user as owner", unAuthorizedUser, obj.getOwnerName()); } } assertTrue("owner_table not found in privilege objects", foundOwnerTable); assertTrue("other_table not found in privilege objects", foundOtherTable); } @Test public void testGetDatabaseObjects_WithPattern() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); String testDb1 = "test_db1"; String testDb2 = "test_db2"; String otherDb = "other_db"; try { // Create test databases Database db1 = new DatabaseBuilder() .setName(testDb1) .setOwnerName(authorizedUser) .build(conf); hmsHandler.create_database(db1); Database db2 = new DatabaseBuilder() .setName(testDb2) .setOwnerName(authorizedUser) .build(conf); hmsHandler.create_database(db2); Database db3 = new DatabaseBuilder() .setName(otherDb) .setOwnerName(authorizedUser) .build(conf); hmsHandler.create_database(db3); // Fetch database objects with pattern GetDatabaseObjectsRequest request = new GetDatabaseObjectsRequest(); request.setCatalogName("hive"); request.setPattern("test_*"); GetDatabaseObjectsResponse response = hmsHandler.get_databases_req(request); assertNotNull("Response should not be null", response); assertNotNull("Databases list should not be null", response.getDatabases()); List<String> dbNames = response.getDatabases().stream() .map(Database::getName) .collect(Collectors.toList()); assertTrue("Should find test_db1", dbNames.contains(testDb1)); assertTrue("Should find test_db2", dbNames.contains(testDb2)); assertFalse("Should not find other_db", dbNames.contains(otherDb)); } finally { try { hmsHandler.drop_database(testDb1, true, false); hmsHandler.drop_database(testDb2, true, false); hmsHandler.drop_database(otherDb, true, false); } catch (Exception e) { // 
Ignore cleanup errors } } } @Test public void testR_CreateDataConnector_unAuthorizedUser() { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); try { DataConnector connector = new DataConnector(dcName, "mysql", "jdbc:mysql://localhost:3306/hive"); CreateDataConnectorRequest connectorReq = new CreateDataConnectorRequest(connector); hmsHandler.create_dataconnector_req(connectorReq); } catch (Exception e) { String err = e.getMessage(); String expected = "Operation type " + HiveOperationType.CREATEDATACONNECTOR + " not allowed for user:" + unAuthorizedUser; assertEquals(expected, err); } } @Test public void testS_CreateDataConnector_authorizedUser() { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { DataConnector connector = new DataConnector(dcName, "mysql", "jdbc:mysql://localhost:3306/hive"); CreateDataConnectorRequest connectorReq = new CreateDataConnectorRequest(connector); hmsHandler.create_dataconnector_req(connectorReq); } catch (Exception e) { fail("testS_CreateDataConnector_authorizedUser() failed with " + e); } } @Test public void testT_AlterDataConnector_AuthorizedUser() { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { DataConnector connector = new DataConnector(dcName, "mysql", "jdbc:mysql://localhost:3306/hive"); CreateDataConnectorRequest connectorReq = new CreateDataConnectorRequest(connector); hmsHandler.create_dataconnector_req(connectorReq); DataConnector newConnector = new DataConnector(dcName, "mysql", "jdbc:mysql://localhost:3308/hive"); AlterDataConnectorRequest alterReq = new AlterDataConnectorRequest(dcName, newConnector); hmsHandler.alter_dataconnector_req(alterReq); } catch (Exception e) { fail("testT_AlterDataConnector_AuthorizedUser() failed with " + e); } } @Test public void testU_DropDataConnector_authorizedUser() { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); 
try { DropDataConnectorRequest dropDcReq = new DropDataConnectorRequest(dcName); dropDcReq.setIfNotExists(true); dropDcReq.setCheckReferences(true); hmsHandler.drop_dataconnector_req(dropDcReq); } catch (Exception e) { fail("testU_DropDataConnector_authorizedUser() failed with " + e); } } /** * Captures and returns the privilege objects for Alter Partition */ private Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> getHivePrivilegeObjectsForAlterPartition() throws HiveAuthzPluginException, HiveAccessControlException { @SuppressWarnings("unchecked") Class<List<HivePrivilegeObject>> class_listPrivObjects = (Class) List.class; ArgumentCaptor<List<HivePrivilegeObject>> inputsCapturer = ArgumentCaptor .forClass(class_listPrivObjects); ArgumentCaptor<List<HivePrivilegeObject>> outputsCapturer = ArgumentCaptor .forClass(class_listPrivObjects); verify(mockHiveAuthorizer).checkPrivileges(eq(HiveOperationType.ALTERPARTITION_FILEFORMAT), inputsCapturer.capture(), outputsCapturer.capture(), any(HiveAuthzContext.class)); return new ImmutablePair<>(inputsCapturer.getValue(), outputsCapturer.getValue()); } @Test public void testV_AlterPartition_DFSUriPrivObject() { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); try { List<List<String>> testValues = createTable4PartColsParts(); List<Partition> oldParts = hmsHandler.get_partitions(dbName, tblName, (short) -1); Partition oldPart = oldParts.get(3); Partition newPart = makeTestChangesOnPartition(oldPart); hmsHandler.rename_partition(dbName, tblName,oldPart.getValues(),newPart); Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectsForAlterPartition(); List<HivePrivilegeObject> outputs = io.getRight(); List<HivePrivilegeObject> tableOutputs = outputs.stream() .filter(o -> o.getType() == HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI) .collect(Collectors.toList()); assertEquals("Should have one DFS_URI privilege object", 1, tableOutputs.size()); 
HivePrivilegeObject DFSUriObj = tableOutputs.get(0); assertEquals("DFS_URI should be same as new partition location", oldPart.getSd().getLocation()+ "/hh=01", DFSUriObj.getObjectName()); } catch (Exception e) { fail("testV_AlterPartition_DFSUriPrivObject() failed with " + e); } } protected Table createPartitionedTestTable(String dbName, String tableName, List<String> partCols, boolean setPartitionLevelPrivilages) throws Exception { Database db = new DatabaseBuilder() .setName(dbName) .build(conf); hmsHandler.create_database(db); TableBuilder builder = new TableBuilder() .setDbName(dbName) .setTableName(tableName) .addCol("id", "int") .addCol("name", "string"); partCols.forEach(col -> builder.addPartCol(col, "string")); Table table = builder.build(conf); hmsHandler.create_table(table); return table; } protected List<List<String>> createTable4PartColsParts() throws Exception { Table table = createPartitionedTestTable(dbName, tblName, PARTCOL_SCHEMA, false); List<List<String>> testValues = Lists.newArrayList( Lists.newArrayList("1999", "01", "02"), Lists.newArrayList("2009", "02", "10"), Lists.newArrayList("2017", "10", "26"), Lists.newArrayList("2017", "11", "27")); for (List<String> vals : testValues) { addPartition(table, vals); } return testValues; } protected void addPartition(Table table, List<String> values) throws TException { PartitionBuilder partitionBuilder = new PartitionBuilder().inTable(table); values.forEach(val -> partitionBuilder.addValue(val)); hmsHandler.add_partition(partitionBuilder.build(conf)); } protected static Partition makeTestChangesOnPartition(Partition partition) { Partition newPart = new Partition(partition); newPart.getParameters().put("hmsTestParam001", "testValue001"); newPart.getSd().setLocation(partition.getSd().getLocation() + "/hh=01"); newPart.setValues(Lists.newArrayList("2018", "11", "27")); return newPart; } @Test public void testUnAuthorizedCause() { 
// NOTE(review): depends on UserGroupInformation.setLoginUser static state shared across tests; safe only under the class's NAME_ASCENDING ordering — verify isolation before running tests in parallel.
UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); try { Database db = new DatabaseBuilder() .setName(dbName) .build(conf); hmsHandler.create_database(db); } catch (Exception e) { // Check if the exception chain contains HiveAuthzPluginException Throwable current = e; boolean foundAuthzException = false; while (current != null) { if (current instanceof HiveAuthzPluginException) { foundAuthzException = true; String expectedErrMsg = "Operation type " + HiveOperationType.CREATEDATABASE + " not allowed for user:" + unAuthorizedUser; assertTrue("Expected error message mismatch. Actual: '" + current.getMessage() + "'", current.getMessage().contains(expectedErrMsg)); break; } current = current.getCause(); } if (!foundAuthzException) { String expectedErrMsg = "Operation type " + HiveOperationType.CREATEDATABASE + " not allowed for user:" + unAuthorizedUser; assertTrue("Expected HiveAuthzPluginException in exception chain. Message: '" + e.getMessage() + "'", e.getMessage().contains(expectedErrMsg)); } } } @Test public void testDropTableNoTablePathWritePermissionShouldFail() throws Exception { UserGroupInformation.setLoginUser( UserGroupInformation.createRemoteUser(authorizedUser)); Table table = new TableBuilder() .setTableName(tblName) .addCol("name", ColumnType.STRING_TYPE_NAME) .setOwner(authorizedUser) .build(conf); hmsHandler.create_table(table); Path tablePath = new Path(table.getSd().getLocation()); when(wh.isWritable(Mockito.eq(tablePath.getParent()))).thenReturn(true); when(wh.isWritable(Mockito.eq(tablePath))).thenReturn(false); try { hmsHandler.drop_table("default", tblName, true); } catch (MetaException e) { String expected = "%s metadata not deleted since %s is not writable by %s" .formatted("Table", tablePath.toString(), authorizedUser); assertEquals(expected, e.getMessage()); } } }
apache/kylin
35,009
src/kylin-it/src/test/java/org/apache/kylin/newten/NBitmapFunctionTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kylin.newten;

import java.io.File;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.commons.collections.ListUtils;
import org.apache.commons.io.FileUtils;
import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
import org.apache.kylin.job.util.JobContextUtil;
import org.apache.kylin.util.ExecAndComp;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.udaf.BitmapSerAndDeSer;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.roaringbitmap.longlong.Roaring64NavigableMap;

/**
 * Integration test for Kylin's bitmap-based SQL functions (intersect_count,
 * intersect_value, bitmap_uuid, bitmap_build, subtract_bitmap_*, union/intersect
 * bitmap-uuid aggregates and their *_v2 filter variants).
 *
 * <p>The single {@link Test}-annotated entry point {@link #testBitmapFunction()}
 * first builds two models via {@code fullBuild(...)} and then drives every
 * private scenario method against the "intersect_count" project, comparing
 * query results (via {@link ExecAndComp}) against hard-coded expected values.
 * Many expectations are Base64-serialized bitmap payloads — they are exact
 * byte-level fingerprints of the test data and must not be edited casually.
 */
public class NBitmapFunctionTest extends NLocalWithSparkSessionTest {

    /** Resets job context, then loads the CSV test data into the shared Spark session. */
    @Override
    @Before
    public void setUp() throws Exception {
        JobContextUtil.cleanUp();
        super.setUp();
        JobContextUtil.getJobContext(getTestConfig());
        populateSSWithCSVData(getTestConfig(), getProject(), ss);
    }

    /** Tears down metadata and removes the Derby metastore directory left behind by the run. */
    @Override
    @After
    public void tearDown() throws Exception {
        JobContextUtil.cleanUp();
        cleanupTestMetadata();
        FileUtils.deleteQuietly(new File("../kylin-it/metastore_db"));
    }

    @Override
    public String getProject() {
        return "intersect_count";
    }

    /**
     * Single entry point: builds both model segments once, then runs every
     * scenario sequentially so the expensive fullBuild happens only one time.
     */
    @Test
    public void testBitmapFunction() throws Exception {
        fullBuild("741ca86a-1f13-46da-a59f-95fb68615e3b");
        fullBuild("741ca86a-1f13-46da-a59f-95fb68615e3z");
        testDateType();
        testMultiMeasures();
        testCommomCase1();
        testCommomCase2();
        testWithUnion();
        testWithLimit();
        testIntersectCountByCol();
        testIntersectCountByColMultiRows();
        testIntersectCount();
        testIntersectValue();
        testExplodeIntersectValue();
        testHllcCanNotAnswerBitmapUUID();
        testSubtractBitmapValue();
        testSubtractBitmapUUID();
        testBitmapBuild();
        testIntersectBimapUuidFunc();
        testUnionBimapUuidFunc();
        testBimapUuidToArrayFunc();
    }

    /**
     * bitmap_build over a constant, over a grouped model query, and via pushdown
     * ({@code querySparkSql}); expected values are Base64-serialized bitmaps.
     */
    private void testBitmapBuild() throws SQLException {
        List<String> result;
        //================= constant case
        String query1 = "select bitmap_build(1)";
        result = ExecAndComp.queryModel(getProject(), query1).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("AAAAAAEAAAAAOjAAAAEAAAAAAAAAEAAAAAEA", result.get(0));

        //================= normal case
        String query2 = "select CAL_DT, bitmap_build(TEST_COUNT_DISTINCT_BITMAP) as first_day "
                + "from test_kylin_fact where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
                + "group by CAL_DT order by CAL_DT";
        result = ExecAndComp.queryModel(getProject(), query2).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("2012-01-01,AAAAAAEAAAAAOzAAAAEAAA0AAQABAA0A", result.get(0));
        Assert.assertEquals("2012-01-02,AAAAAAEAAAAAOzAAAAEAAAkAAgAFAAAADwAIAA==", result.get(1));
        Assert.assertEquals("2012-01-03,AAAAAAEAAAAAOjAAAAEAAAAAAAQAEAAAABMAGAAZABoAGwA=", result.get(2));

        //================= pushdown case
        String query3 = "select CAL_DT, bitmap_build(LEAF_CATEG_ID) from test_kylin_fact "
                + "where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') group by CAL_DT "
                + "order by CAL_DT";
        result = ExecAndComp.querySparkSql(query3).collectAsList().stream().map(row -> row.toSeq().mkString(","))
                .collect(Collectors.toList());
        Assert.assertEquals(
                "2012-01-01,AAAAAAEAAAAAOjAAAAMAAAAAAAUAAQABAAIAAwAgAAAALAAAADAAAADDA0UFIi0FUPKK4/LFc7h1yiVkQ05shq4=",
                result.get(0));
        Assert.assertEquals("2012-01-02,AAAAAAEAAAAAOjAAAAIAAAAAAAYAAQACABgAAAAmAAAATQVKJ31ABVDdX3uckfmRJ7h1CpM=",
                result.get(1));
        Assert.assertEquals("2012-01-03,AAAAAAEAAAAAOjAAAAMAAAAAAAEAAQAAAAIAAQAgAAAAJAAAACYAAADSJIFRkSdaXuWn",
                result.get(2));
    }

    /** intersect_count with DATE-typed filter values, including multi-day retention arrays. */
    private void testDateType() throws SQLException {
        String query = "select CAL_DT, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-02']) as second_day, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-03']) as third_day, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02']) as retention_oneday, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02',date'2012-01-03']) as retention_twoday "
                + "from test_kylin_fact "
                + "where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
                + "group by CAL_DT "
                + "order by CAL_DT ";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("2012-01-01,14,0,0,0,0", result.get(0));
        Assert.assertEquals("2012-01-02,0,10,0,0,0", result.get(1));
        Assert.assertEquals("2012-01-03,0,0,5,0,0", result.get(2));
    }

    /** Several intersect_count measures alongside count(distinct) and count(*) in one query, with a join. */
    private void testMultiMeasures() throws SQLException {
        String query = "select week_beg_dt as week, "
                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC']) as a, "
                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['Auction']) as b, "
                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['Others']) as c, "
                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC', 'Auction']) as ab, "
                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC', 'Others']) as ac, "
                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC', 'Auction', 'Others']) as abc, "
                + "count(distinct TEST_COUNT_DISTINCT_BITMAP) as sellers, count(*) as cnt "
                + "from test_kylin_fact left join edw.test_cal_dt on test_kylin_fact.cal_dt = edw.test_cal_dt.CAL_DT "
                + "where week_beg_dt in (DATE '2013-12-22', DATE '2012-06-23') group by week_beg_dt order by week_beg_dt";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("2012-06-23,21,17,13,0,0,0,90,94", result.get(0));
        Assert.assertEquals("2013-12-22,18,22,13,0,0,0,98,99", result.get(1));
    }

    /** intersect_count grouped by a dimension other than the filter column (LSTG_FORMAT_NAME). */
    private void testCommomCase1() throws SQLException {
        String query = "select LSTG_FORMAT_NAME, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-02']) as second_day, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-03']) as third_day, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02']) as retention_oneday, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02',date'2012-01-03']) as retention_twoday "
                + "from test_kylin_fact where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
                + "group by LSTG_FORMAT_NAME order by LSTG_FORMAT_NAME";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("ABIN,6,4,2,0,0", result.get(0));
        Assert.assertEquals("Auction,4,3,1,0,0", result.get(1));
        Assert.assertEquals("FP-GTC,2,2,0,0,0", result.get(2));
        Assert.assertEquals("FP-non GTC,2,1,0,0,0", result.get(3));
        Assert.assertEquals("Others,0,0,2,0,0", result.get(4));
    }

    /** intersect_count grouped by a high-cardinality dimension (LEAF_CATEG_ID). */
    private void testCommomCase2() throws SQLException {
        String query4 = "select LEAF_CATEG_ID, "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day "
                + "from test_kylin_fact where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
                + "group by LEAF_CATEG_ID "
                + "order by LEAF_CATEG_ID";
        List<String> result = ExecAndComp.queryModel(getProject(), query4).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("963,1", result.get(0));
        Assert.assertEquals("1349,1", result.get(1));
        Assert.assertEquals("11554,1", result.get(2));
        Assert.assertEquals("20485,1", result.get(3));
        Assert.assertEquals("35570,1", result.get(4));
        Assert.assertEquals("62179,2", result.get(5));
        Assert.assertEquals("95173,1", result.get(6));
        Assert.assertEquals("95672,2", result.get(7));
        Assert.assertEquals("140746,1", result.get(8));
        Assert.assertEquals("148324,1", result.get(9));
        Assert.assertEquals("158798,1", result.get(10));
        Assert.assertEquals("175750,1", result.get(11));
    }

    /** intersect_count inside a UNION ALL of two scalar-subquery-labelled selects. */
    private void testWithUnion() throws SQLException {
        String query = "SELECT (SELECT '2012-01-01') AS sdate, "
                + " intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-01',date'2012-01-01']),"
                + " intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-01',date'2012-01-02']),"
                + " intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-01',date'2012-01-03'])"
                + "FROM test_kylin_fact WHERE cal_dt >= date '2012-01-01' AND cal_dt < date'2012-01-07' "
                + "UNION ALL "
                + "SELECT (SELECT '2012-01-02') AS sdate, "
                + " intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-02',date'2012-01-02']),"
                + " intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-02',date'2012-01-03']),"
                + " intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-02',date'2012-01-04'])"
                + "FROM test_kylin_fact WHERE cal_dt >= date '2012-01-02' AND cal_dt < date'2012-01-07'"
                + "order by sdate";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("2012-01-01,14,1,0", result.get(0));
        Assert.assertEquals("2012-01-02,10,1,0", result.get(1));
    }

    /** intersect_count combined with a LIMIT clause. */
    private void testWithLimit() throws SQLException {
        String query = "select intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day "
                + "from test_kylin_fact "
                + "limit 1";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("14", result.get(0));
    }

    /** intersect_count_by_col over a grouped subquery — one bitmap row per group. */
    private void testIntersectCountByColMultiRows() throws SQLException {
        String query1 = "select intersect_count_by_col(Array[t1.a1]), LSTG_FORMAT_NAME from "
                + " (select bitmap_uuid(SELLER_ID) as a1, LSTG_FORMAT_NAME "
                + " from TEST_KYLIN_FACT group by LSTG_FORMAT_NAME) t1 order by LSTG_FORMAT_NAME";
        List<String> result1 = ExecAndComp.queryModel(getProject(), query1).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("855,ABIN", result1.get(0));
        Assert.assertEquals("896,Auction", result1.get(1));
        Assert.assertEquals("858,FP-GTC", result1.get(2));
        Assert.assertEquals("870,FP-non GTC", result1.get(3));
        Assert.assertEquals("855,Others", result1.get(4));
    }

    /**
     * intersect_count_by_col joining a plain bitmap_uuid with intersect_bitmap_uuid
     * and both _v2 filter modes (REGEXP / RAWSTRING); all three union branches
     * must agree, first on SELLER_ID, then on the precomputed bitmap measure.
     */
    private void testIntersectCountByCol() throws Exception {
        String query1 = "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
                + " (select bitmap_uuid(SELLER_ID) as a1 "
                + " from TEST_KYLIN_FACT) t1, "
                + " (select intersect_bitmap_uuid( "
                + " SELLER_ID, LSTG_FORMAT_NAME, "
                + " array['FP-GTC|FP-non GTC', 'Others']) as a2 "
                + "from TEST_KYLIN_FACT) t2 "
                + "union all "
                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
                + " (select bitmap_uuid(SELLER_ID) as a1 "
                + " from TEST_KYLIN_FACT) t1, "
                + " (select intersect_bitmap_uuid_v2( "
                + " SELLER_ID, LSTG_FORMAT_NAME, "
                + " array['FP-.*GTC', 'Others'], 'REGEXP') as a2 "
                + "from TEST_KYLIN_FACT) t2 "
                + "union all "
                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
                + " (select bitmap_uuid(SELLER_ID) as a1 "
                + " from TEST_KYLIN_FACT) t1, "
                + " (select intersect_bitmap_uuid_v2( "
                + " SELLER_ID, LSTG_FORMAT_NAME, "
                + " array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as a2 "
                + "from TEST_KYLIN_FACT) t2";
        List<String> result1 = ExecAndComp.queryModel(getProject(), query1).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("841", result1.get(0));
        Assert.assertEquals("841", result1.get(1));
        Assert.assertEquals("841", result1.get(2));

        String query2 = "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
                + " (select bitmap_uuid(TEST_COUNT_DISTINCT_BITMAP) as a1 "
                + " from TEST_KYLIN_FACT) t1, "
                + " (select intersect_bitmap_uuid( "
                + " TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, "
                + " array['FP-GTC|FP-non GTC', 'Others']) as a2 "
                + "from TEST_KYLIN_FACT) t2 "
                + "union all "
                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
                + " (select bitmap_uuid(TEST_COUNT_DISTINCT_BITMAP) as a1 "
                + " from TEST_KYLIN_FACT) t1, "
                + " (select intersect_bitmap_uuid_v2( "
                + " TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, "
                + " array['FP-.*GTC', 'Others'], 'REGEXP') as a2 "
                + "from TEST_KYLIN_FACT) t2 "
                + "union all "
                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
                + " (select bitmap_uuid(TEST_COUNT_DISTINCT_BITMAP) as a1 "
                + " from TEST_KYLIN_FACT) t1, "
                + " (select intersect_bitmap_uuid_v2( "
                + " TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, "
                + " array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as a2 "
                + "from TEST_KYLIN_FACT) t2";
        List<String> result2 = ExecAndComp.queryModel(getProject(), query2).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("862", result2.get(0));
        Assert.assertEquals("862", result2.get(1));
        Assert.assertEquals("862", result2.get(2));
    }

    /** intersect_count vs. intersect_count_v2 (REGEXP and RAWSTRING) must give identical counts. */
    private void testIntersectCount() throws SQLException {
        String query = "select "
                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC|FP-non GTC', 'Others']) as a, "
                + "intersect_count_v2(TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, array['FP-.*GTC', 'Others'], 'REGEXP') as b, "
                + "intersect_count_v2(TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as c "
                + "from test_kylin_fact";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("862,862,862", result.get(0));
    }

    /** intersect_value and its _v2 variants must all return the same member array. */
    private void testIntersectValue() throws SQLException {
        String query = "select "
                + "intersect_value(LSTG_SITE_ID, lstg_format_name, array['FP-GTC|FP-non GTC', 'Others']) as a, "
                + "intersect_value_v2(LSTG_SITE_ID, LSTG_FORMAT_NAME, array['FP-.*GTC', 'Others'], 'REGEXP') as b, "
                + "intersect_value_v2(LSTG_SITE_ID, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as c "
                + "from test_kylin_fact ";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("WrappedArray(0, 2, 3, 15, 23, 100, 101, 211),"
                + "WrappedArray(0, 2, 3, 15, 23, 100, 101, 211),"
                + "WrappedArray(0, 2, 3, 15, 23, 100, 101, 211)", result.get(0));
    }

    /** explode() over an intersect_value array yields one row per member, in order. */
    private void testExplodeIntersectValue() throws SQLException {
        String query = "select "
                + "explode(intersect_value(LSTG_SITE_ID, lstg_format_name, array['FP-GTC|FP-non GTC', 'Others'])) as a "
                + "from test_kylin_fact ";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("0", result.get(0));
        Assert.assertEquals("2", result.get(1));
        Assert.assertEquals("3", result.get(2));
        Assert.assertEquals("15", result.get(3));
        Assert.assertEquals("23", result.get(4));
        Assert.assertEquals("100", result.get(5));
        Assert.assertEquals("101", result.get(6));
        Assert.assertEquals("211", result.get(7));
    }

    /** bitmap_uuid over SELLER_ID answered per group — counts must match the precise bitmap path. */
    private void testHllcCanNotAnswerBitmapUUID() throws SQLException {
        String query = "select intersect_count_by_col(Array[t1.a1]), LSTG_FORMAT_NAME from"
                + " (select bitmap_uuid(SELLER_ID) as a1, LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by LSTG_FORMAT_NAME) t1"
                + " order by LSTG_FORMAT_NAME";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("855,ABIN", result.get(0));
        Assert.assertEquals("896,Auction", result.get(1));
        Assert.assertEquals("858,FP-GTC", result.get(2));
        Assert.assertEquals("870,FP-non GTC", result.get(3));
        Assert.assertEquals("855,Others", result.get(4));
    }

    /**
     * subtract_bitmap_value: cross-checks the returned member list against an
     * equivalent Spark set expression — ((FP-GTC ∪ FP-non GTC) ∩ Others) minus
     * (ABIN ∩ Auction) — computed directly with union/intersect/except.
     */
    private void testSubtractBitmapValue() throws SQLException {
        String query = "select subtract_bitmap_value("
                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING'),"
                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN', 'Auction'], 'RAWSTRING'))"
                + "from TEST_KYLIN_FACT";
        List<Integer> acutal = ExecAndComp.queryModel(getProject(), query).collectAsList().get(0).getList(0).stream()
                .map(row -> Integer.parseInt(row.toString())).collect(Collectors.toList());
        Dataset<Row> fg = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'FP-GTC'");
        Dataset<Row> fng = ss
                .sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'FP-non GTC'");
        Dataset<Row> ot = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'Others'");
        Dataset<Row> ab = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'ABIN'");
        Dataset<Row> au = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'Auction'");
        List<Integer> expect = fg.union(fng).intersect(ot).except(ab.intersect(au)).sort(new Column("SELLER_ID"))
                .collectAsList().stream().map(row -> row.getInt(0)).collect(Collectors.toList());
        Assert.assertEquals(expect.size(), acutal.size());
        for (int i = 0; i < acutal.size(); i++) {
            Assert.assertEquals(expect.get(i), acutal.get(i));
        }
    }

    /** subtract_bitmap_uuid fed into intersect_count_by_col against the full seller bitmap. */
    private void testSubtractBitmapUUID() throws SQLException {
        String query = "select intersect_count_by_col(Array[t1.a1, t2.a2]) from (select subtract_bitmap_uuid("
                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING'),"
                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN', 'Auction'], 'RAWSTRING')) as a1 "
                + "from TEST_KYLIN_FACT) t1, (select bitmap_uuid(SELLER_ID) as a2 from TEST_KYLIN_FACT) t2";
        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
        Assert.assertEquals("210", result.get(0));
    }

    /**
     * intersect_bitmap_uuid_count / _value_all / _value (paged) / _distinct over a
     * 5-way UNION of per-format bitmaps. The expected set is the intersection of
     * the five collect_set(SELLER_ID) lists computed with commons-collections.
     * The paged variant substitutes the literal tokens "limit"/"offset" in the
     * SQL template via String.replace, and negative values must raise
     * "both limit and offset must be >= 0". The _distinct variant returns a
     * serialized Roaring64NavigableMap that is deserialized and walked.
     */
    private void testIntersectBimapUuidFunc() throws Exception {
        Dataset<Row> expect = ss
                .sql("select LSTG_FORMAT_NAME,collect_set( SELLER_ID) from TEST_KYLIN_FACT group by LSTG_FORMAT_NAME");
        List<Row> rows = expect.collectAsList();
        List<Object> list0 = rows.get(0).getList(1);
        List<Object> list1 = rows.get(1).getList(1);
        List<Object> list2 = rows.get(2).getList(1);
        List<Object> list3 = rows.get(3).getList(1);
        List<Object> list4 = rows.get(4).getList(1);
        // raw List: commons-collections 3 ListUtils.intersection is pre-generics
        List intersection = ListUtils.intersection(list0, list1);
        intersection = ListUtils.intersection(intersection, list2);
        intersection = ListUtils.intersection(intersection, list3);
        intersection = ListUtils.intersection(intersection, list4);
        Collections.sort(intersection);

        String countSql = "select intersect_bitmap_uuid_count(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT) t1";
        int countResult = ExecAndComp.queryModel(getProject(), countSql).collectAsList().get(0).getInt(0);
        Assert.assertEquals(intersection.size(), countResult);

        String valueSql = "select intersect_bitmap_uuid_value_all(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT) t1";
        List<Object> valueResult = ExecAndComp.queryModel(getProject(), valueSql).collectAsList().get(0).getList(0);
        for (int i = 0; i < intersection.size(); i++) {
            Assert.assertEquals(intersection.get(i).toString(), valueResult.get(i).toString());
        }

        // SQL template: "limit" and "offset" are placeholder tokens replaced below.
        String valueTmp = "select intersect_bitmap_uuid_value(uuid,limit,offset) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT ) t1";
        int limit = 100;
        int offset = 0;
        int pageSize = intersection.size() / limit + 1;
        for (int page = 1; page <= pageSize; page++) {
            offset = (page - 1) * limit;
            valueSql = valueTmp.replace("limit", limit + "").replace("offset", offset + "");
            valueResult = ExecAndComp.queryModel(getProject(), valueSql).collectAsList().get(0).getList(0);
            for (int i = 0; i < valueResult.size(); i++) {
                Assert.assertEquals(intersection.get(offset).toString(), valueResult.get(i).toString());
                offset += 1;
            }
        }

        String ep = "both limit and offset must be >= 0";
        // test limit < 0
        final String valueSql2 = valueTmp.replace("limit", -1 + "").replace("offset", offset + "");
        Assert.assertThrows(ep, SQLException.class,
                () -> ExecAndComp.queryModel(getProject(), valueSql2).collectAsList().get(0).getList(0));
        // test offset < 0
        final String valueSql3 = valueTmp.replace("limit", 100 + "").replace("offset", -1 + "");
        Assert.assertThrows(ep, SQLException.class,
                () -> ExecAndComp.queryModel(getProject(), valueSql3).collectAsList().get(0).getList(0));

        String distinctSql = "select intersect_bitmap_uuid_distinct(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT ) t1";
        byte[] distinctResult = (byte[]) ExecAndComp.queryModel(getProject(), distinctSql).collectAsList().get(0)
                .get(0);
        Roaring64NavigableMap bitmap = BitmapSerAndDeSer.get().deserialize(distinctResult);
        int i = 0;
        Iterator<Long> iterator = bitmap.iterator();
        while (iterator.hasNext()) {
            Long next = iterator.next();
            Assert.assertEquals(next.toString(), intersection.get(i).toString());
            i++;
        }
    }

    /**
     * union_bitmap_uuid_count / _value_all / _value (paged) / _distinct over the
     * same 5-way UNION; the expected set here is simply all distinct SELLER_IDs,
     * since the five formats together cover the whole fact table.
     */
    private void testUnionBimapUuidFunc() throws Exception {
        Dataset<Row> totalIdQuery = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT order by SELLER_ID asc");
        List<Integer> tatalIds = totalIdQuery.collectAsList().stream().map(row -> row.getInt(0))
                .collect(Collectors.toList());

        String countSql = "select union_bitmap_uuid_count(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT) t1";
        int countResult = ExecAndComp.queryModel(getProject(), countSql).collectAsList().get(0).getInt(0);
        Assert.assertEquals(tatalIds.size(), countResult);

        String valueSql = "select union_bitmap_uuid_value_all(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT ) t1";
        List<Object> valueResult = ExecAndComp.queryModel(getProject(), valueSql).collectAsList().get(0).getList(0);
        for (int i = 0; i < tatalIds.size(); i++) {
            Assert.assertEquals(tatalIds.get(i).toString(), valueResult.get(i).toString());
        }

        // SQL template: "limit" and "offset" are placeholder tokens replaced below.
        String valueTmp = "select union_bitmap_uuid_value(uuid,limit,offset) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT ) t1";
        int limit = 100;
        int offset = 0;
        int pageSize = tatalIds.size() / limit + 1;
        for (int page = 1; page <= pageSize; page++) {
            offset = (page - 1) * limit;
            valueSql = valueTmp.replace("limit", limit + "").replace("offset", offset + "");
            valueResult = ExecAndComp.queryModel(getProject(), valueSql).collectAsList().get(0).getList(0);
            for (int i = 0; i < valueResult.size(); i++) {
                Assert.assertEquals(tatalIds.get(offset).toString(), valueResult.get(i).toString());
                offset += 1;
            }
        }

        String distinctSql = "select union_bitmap_uuid_distinct(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Auction']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['FP-non GTC']) as uuid "
                + "from TEST_KYLIN_FACT UNION "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT) t1";
        byte[] distinctResult = (byte[]) ExecAndComp.queryModel(getProject(), distinctSql).collectAsList().get(0)
                .get(0);
        Roaring64NavigableMap bitmap = BitmapSerAndDeSer.get().deserialize(distinctResult);
        int i = 0;
        Iterator<Long> iterator = bitmap.iterator();
        while (iterator.hasNext()) {
            Long next = iterator.next();
            Assert.assertEquals(next.toString(), tatalIds.get(i).toString());
            i++;
        }
    }

    /** bitmap_uuid_to_array must produce the same members as intersect_bitmap_uuid_value_all. */
    private void testBimapUuidToArrayFunc() throws Exception {
        String arraySql = "select bitmap_uuid_to_array(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT) t1";
        List<Object> ArrayResult = ExecAndComp.queryModel(getProject(), arraySql).collectAsList().get(0).getList(0);
        String valueSql = "select intersect_bitmap_uuid_value_all(uuid) uuid from ( "
                + "select intersect_bitmap_uuid(SELLER_ID, LSTG_FORMAT_NAME, array['Others']) as uuid "
                + "from TEST_KYLIN_FACT) t1";
        List<Object> valueResult = ExecAndComp.queryModel(getProject(), valueSql).collectAsList().get(0).getList(0);
        for (int i = 0; i < ArrayResult.size(); i++) {
            Assert.assertEquals(ArrayResult.get(i).toString(), valueResult.get(i).toString());
        }
    }
}
googleapis/google-cloud-java
35,333
java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/VpnGatewaysStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1.stub; import static com.google.cloud.compute.v1.VpnGatewaysClient.AggregatedListPagedResponse; import static com.google.cloud.compute.v1.VpnGatewaysClient.ListPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.httpjson.ProtoOperationTransformers; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import 
com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.AggregatedListVpnGatewaysRequest; import com.google.cloud.compute.v1.DeleteVpnGatewayRequest; import com.google.cloud.compute.v1.GetStatusVpnGatewayRequest; import com.google.cloud.compute.v1.GetVpnGatewayRequest; import com.google.cloud.compute.v1.InsertVpnGatewayRequest; import com.google.cloud.compute.v1.ListVpnGatewaysRequest; import com.google.cloud.compute.v1.Operation; import com.google.cloud.compute.v1.SetLabelsVpnGatewayRequest; import com.google.cloud.compute.v1.TestIamPermissionsVpnGatewayRequest; import com.google.cloud.compute.v1.TestPermissionsResponse; import com.google.cloud.compute.v1.VpnGateway; import com.google.cloud.compute.v1.VpnGatewayAggregatedList; import com.google.cloud.compute.v1.VpnGatewayList; import com.google.cloud.compute.v1.VpnGatewaysGetStatusResponse; import com.google.cloud.compute.v1.VpnGatewaysScopedList; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.IOException; import java.time.Duration; import java.util.List; import java.util.Map; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link VpnGatewaysStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (compute.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. 
When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of get: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * VpnGatewaysStubSettings.Builder vpnGatewaysSettingsBuilder = * VpnGatewaysStubSettings.newBuilder(); * vpnGatewaysSettingsBuilder * .getSettings() * .setRetrySettings( * vpnGatewaysSettingsBuilder * .getSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * VpnGatewaysStubSettings vpnGatewaysSettings = vpnGatewaysSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for delete: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. 
* // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * VpnGatewaysStubSettings.Builder vpnGatewaysSettingsBuilder = * VpnGatewaysStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * vpnGatewaysSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @Generated("by gapic-generator-java") public class VpnGatewaysStubSettings extends StubSettings<VpnGatewaysStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/compute") .add("https://www.googleapis.com/auth/cloud-platform") .build(); private final PagedCallSettings< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, AggregatedListPagedResponse> aggregatedListSettings; private final UnaryCallSettings<DeleteVpnGatewayRequest, Operation> deleteSettings; private final OperationCallSettings<DeleteVpnGatewayRequest, Operation, Operation> deleteOperationSettings; private final UnaryCallSettings<GetVpnGatewayRequest, VpnGateway> getSettings; private final UnaryCallSettings<GetStatusVpnGatewayRequest, VpnGatewaysGetStatusResponse> getStatusSettings; private final UnaryCallSettings<InsertVpnGatewayRequest, Operation> insertSettings; private final OperationCallSettings<InsertVpnGatewayRequest, Operation, Operation> insertOperationSettings; private final PagedCallSettings<ListVpnGatewaysRequest, VpnGatewayList, ListPagedResponse> listSettings; private final UnaryCallSettings<SetLabelsVpnGatewayRequest, Operation> setLabelsSettings; private final OperationCallSettings<SetLabelsVpnGatewayRequest, Operation, Operation> setLabelsOperationSettings; private final UnaryCallSettings<TestIamPermissionsVpnGatewayRequest, TestPermissionsResponse> testIamPermissionsSettings; private static final PagedListDescriptor< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, Map.Entry<String, VpnGatewaysScopedList>> AGGREGATED_LIST_PAGE_STR_DESC = new PagedListDescriptor< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, Map.Entry<String, VpnGatewaysScopedList>>() { @Override public String emptyToken() { return ""; } @Override public AggregatedListVpnGatewaysRequest injectToken( AggregatedListVpnGatewaysRequest payload, String token) { return AggregatedListVpnGatewaysRequest.newBuilder(payload) .setPageToken(token) .build(); } @Override public AggregatedListVpnGatewaysRequest 
injectPageSize( AggregatedListVpnGatewaysRequest payload, int pageSize) { return AggregatedListVpnGatewaysRequest.newBuilder(payload) .setMaxResults(pageSize) .build(); } @Override public Integer extractPageSize(AggregatedListVpnGatewaysRequest payload) { return payload.getMaxResults(); } @Override public String extractNextToken(VpnGatewayAggregatedList payload) { return payload.getNextPageToken(); } @Override public Iterable<Map.Entry<String, VpnGatewaysScopedList>> extractResources( VpnGatewayAggregatedList payload) { return payload.getItemsMap().entrySet(); } }; private static final PagedListDescriptor<ListVpnGatewaysRequest, VpnGatewayList, VpnGateway> LIST_PAGE_STR_DESC = new PagedListDescriptor<ListVpnGatewaysRequest, VpnGatewayList, VpnGateway>() { @Override public String emptyToken() { return ""; } @Override public ListVpnGatewaysRequest injectToken( ListVpnGatewaysRequest payload, String token) { return ListVpnGatewaysRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListVpnGatewaysRequest injectPageSize( ListVpnGatewaysRequest payload, int pageSize) { return ListVpnGatewaysRequest.newBuilder(payload).setMaxResults(pageSize).build(); } @Override public Integer extractPageSize(ListVpnGatewaysRequest payload) { return payload.getMaxResults(); } @Override public String extractNextToken(VpnGatewayList payload) { return payload.getNextPageToken(); } @Override public Iterable<VpnGateway> extractResources(VpnGatewayList payload) { return payload.getItemsList(); } }; private static final PagedListResponseFactory< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, AggregatedListPagedResponse> AGGREGATED_LIST_PAGE_STR_FACT = new PagedListResponseFactory< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, AggregatedListPagedResponse>() { @Override public ApiFuture<AggregatedListPagedResponse> getFuturePagedResponse( UnaryCallable<AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList> callable, 
AggregatedListVpnGatewaysRequest request, ApiCallContext context, ApiFuture<VpnGatewayAggregatedList> futureResponse) { PageContext< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, Map.Entry<String, VpnGatewaysScopedList>> pageContext = PageContext.create(callable, AGGREGATED_LIST_PAGE_STR_DESC, request, context); return AggregatedListPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListVpnGatewaysRequest, VpnGatewayList, ListPagedResponse> LIST_PAGE_STR_FACT = new PagedListResponseFactory< ListVpnGatewaysRequest, VpnGatewayList, ListPagedResponse>() { @Override public ApiFuture<ListPagedResponse> getFuturePagedResponse( UnaryCallable<ListVpnGatewaysRequest, VpnGatewayList> callable, ListVpnGatewaysRequest request, ApiCallContext context, ApiFuture<VpnGatewayList> futureResponse) { PageContext<ListVpnGatewaysRequest, VpnGatewayList, VpnGateway> pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context); return ListPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to aggregatedList. */ public PagedCallSettings< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, AggregatedListPagedResponse> aggregatedListSettings() { return aggregatedListSettings; } /** Returns the object with the settings used for calls to delete. */ public UnaryCallSettings<DeleteVpnGatewayRequest, Operation> deleteSettings() { return deleteSettings; } /** Returns the object with the settings used for calls to delete. */ public OperationCallSettings<DeleteVpnGatewayRequest, Operation, Operation> deleteOperationSettings() { return deleteOperationSettings; } /** Returns the object with the settings used for calls to get. */ public UnaryCallSettings<GetVpnGatewayRequest, VpnGateway> getSettings() { return getSettings; } /** Returns the object with the settings used for calls to getStatus. 
*/ public UnaryCallSettings<GetStatusVpnGatewayRequest, VpnGatewaysGetStatusResponse> getStatusSettings() { return getStatusSettings; } /** Returns the object with the settings used for calls to insert. */ public UnaryCallSettings<InsertVpnGatewayRequest, Operation> insertSettings() { return insertSettings; } /** Returns the object with the settings used for calls to insert. */ public OperationCallSettings<InsertVpnGatewayRequest, Operation, Operation> insertOperationSettings() { return insertOperationSettings; } /** Returns the object with the settings used for calls to list. */ public PagedCallSettings<ListVpnGatewaysRequest, VpnGatewayList, ListPagedResponse> listSettings() { return listSettings; } /** Returns the object with the settings used for calls to setLabels. */ public UnaryCallSettings<SetLabelsVpnGatewayRequest, Operation> setLabelsSettings() { return setLabelsSettings; } /** Returns the object with the settings used for calls to setLabels. */ public OperationCallSettings<SetLabelsVpnGatewayRequest, Operation, Operation> setLabelsOperationSettings() { return setLabelsOperationSettings; } /** Returns the object with the settings used for calls to testIamPermissions. */ public UnaryCallSettings<TestIamPermissionsVpnGatewayRequest, TestPermissionsResponse> testIamPermissionsSettings() { return testIamPermissionsSettings; } public VpnGatewaysStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonVpnGatewaysStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "compute"; } /** Returns a builder for the default ExecutorProvider for this service. 
*/ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "compute.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "compute.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultHttpJsonTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(VpnGatewaysStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected VpnGatewaysStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); aggregatedListSettings = settingsBuilder.aggregatedListSettings().build(); deleteSettings = settingsBuilder.deleteSettings().build(); deleteOperationSettings = settingsBuilder.deleteOperationSettings().build(); getSettings = settingsBuilder.getSettings().build(); getStatusSettings = settingsBuilder.getStatusSettings().build(); insertSettings = settingsBuilder.insertSettings().build(); insertOperationSettings = settingsBuilder.insertOperationSettings().build(); listSettings = settingsBuilder.listSettings().build(); setLabelsSettings = settingsBuilder.setLabelsSettings().build(); setLabelsOperationSettings = settingsBuilder.setLabelsOperationSettings().build(); testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build(); } /** Builder for VpnGatewaysStubSettings. */ public static class Builder extends StubSettings.Builder<VpnGatewaysStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final PagedCallSettings.Builder< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, AggregatedListPagedResponse> aggregatedListSettings; private final UnaryCallSettings.Builder<DeleteVpnGatewayRequest, Operation> deleteSettings; private final OperationCallSettings.Builder<DeleteVpnGatewayRequest, Operation, Operation> deleteOperationSettings; private final UnaryCallSettings.Builder<GetVpnGatewayRequest, VpnGateway> getSettings; private final UnaryCallSettings.Builder< GetStatusVpnGatewayRequest, VpnGatewaysGetStatusResponse> getStatusSettings; private final UnaryCallSettings.Builder<InsertVpnGatewayRequest, Operation> insertSettings; private final OperationCallSettings.Builder<InsertVpnGatewayRequest, Operation, Operation> insertOperationSettings; private final PagedCallSettings.Builder< ListVpnGatewaysRequest, VpnGatewayList, 
ListPagedResponse> listSettings; private final UnaryCallSettings.Builder<SetLabelsVpnGatewayRequest, Operation> setLabelsSettings; private final OperationCallSettings.Builder<SetLabelsVpnGatewayRequest, Operation, Operation> setLabelsOperationSettings; private final UnaryCallSettings.Builder< TestIamPermissionsVpnGatewayRequest, TestPermissionsResponse> testIamPermissionsSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf( Lists.<StatusCode.Code>newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); definitions.put( "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(60000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L)) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build(); definitions.put("retry_policy_0_params", settings); settings = RetrySettings.newBuilder() .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L)) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build(); definitions.put("no_retry_1_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { 
super(clientContext); aggregatedListSettings = PagedCallSettings.newBuilder(AGGREGATED_LIST_PAGE_STR_FACT); deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteOperationSettings = OperationCallSettings.newBuilder(); getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); getStatusSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); insertOperationSettings = OperationCallSettings.newBuilder(); listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT); setLabelsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); setLabelsOperationSettings = OperationCallSettings.newBuilder(); testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( aggregatedListSettings, deleteSettings, getSettings, getStatusSettings, insertSettings, listSettings, setLabelsSettings, testIamPermissionsSettings); initDefaults(this); } protected Builder(VpnGatewaysStubSettings settings) { super(settings); aggregatedListSettings = settings.aggregatedListSettings.toBuilder(); deleteSettings = settings.deleteSettings.toBuilder(); deleteOperationSettings = settings.deleteOperationSettings.toBuilder(); getSettings = settings.getSettings.toBuilder(); getStatusSettings = settings.getStatusSettings.toBuilder(); insertSettings = settings.insertSettings.toBuilder(); insertOperationSettings = settings.insertOperationSettings.toBuilder(); listSettings = settings.listSettings.toBuilder(); setLabelsSettings = settings.setLabelsSettings.toBuilder(); setLabelsOperationSettings = settings.setLabelsOperationSettings.toBuilder(); testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( aggregatedListSettings, deleteSettings, getSettings, getStatusSettings, insertSettings, listSettings, setLabelsSettings, 
testIamPermissionsSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .aggregatedListSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .deleteSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .getSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getStatusSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .insertSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .listSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .setLabelsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .testIamPermissionsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .deleteOperationSettings() .setInitialCallSettings( 
UnaryCallSettings .<DeleteVpnGatewayRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .insertOperationSettings() .setInitialCallSettings( UnaryCallSettings .<InsertVpnGatewayRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .setLabelsOperationSettings() .setInitialCallSettings( UnaryCallSettings .<SetLabelsVpnGatewayRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) 
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to aggregatedList. */ public PagedCallSettings.Builder< AggregatedListVpnGatewaysRequest, VpnGatewayAggregatedList, AggregatedListPagedResponse> aggregatedListSettings() { return aggregatedListSettings; } /** Returns the builder for the settings used for calls to delete. */ public UnaryCallSettings.Builder<DeleteVpnGatewayRequest, Operation> deleteSettings() { return deleteSettings; } /** Returns the builder for the settings used for calls to delete. */ public OperationCallSettings.Builder<DeleteVpnGatewayRequest, Operation, Operation> deleteOperationSettings() { return deleteOperationSettings; } /** Returns the builder for the settings used for calls to get. 
*/ public UnaryCallSettings.Builder<GetVpnGatewayRequest, VpnGateway> getSettings() { return getSettings; } /** Returns the builder for the settings used for calls to getStatus. */ public UnaryCallSettings.Builder<GetStatusVpnGatewayRequest, VpnGatewaysGetStatusResponse> getStatusSettings() { return getStatusSettings; } /** Returns the builder for the settings used for calls to insert. */ public UnaryCallSettings.Builder<InsertVpnGatewayRequest, Operation> insertSettings() { return insertSettings; } /** Returns the builder for the settings used for calls to insert. */ public OperationCallSettings.Builder<InsertVpnGatewayRequest, Operation, Operation> insertOperationSettings() { return insertOperationSettings; } /** Returns the builder for the settings used for calls to list. */ public PagedCallSettings.Builder<ListVpnGatewaysRequest, VpnGatewayList, ListPagedResponse> listSettings() { return listSettings; } /** Returns the builder for the settings used for calls to setLabels. */ public UnaryCallSettings.Builder<SetLabelsVpnGatewayRequest, Operation> setLabelsSettings() { return setLabelsSettings; } /** Returns the builder for the settings used for calls to setLabels. */ public OperationCallSettings.Builder<SetLabelsVpnGatewayRequest, Operation, Operation> setLabelsOperationSettings() { return setLabelsOperationSettings; } /** Returns the builder for the settings used for calls to testIamPermissions. */ public UnaryCallSettings.Builder<TestIamPermissionsVpnGatewayRequest, TestPermissionsResponse> testIamPermissionsSettings() { return testIamPermissionsSettings; } @Override public VpnGatewaysStubSettings build() throws IOException { return new VpnGatewaysStubSettings(this); } } }
oracle/coherence
34,933
prj/coherence-core/src/main/java/com/tangosol/coherence/reporter/ReportBatch.java
/* * Copyright (c) 2000, 2022, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ package com.tangosol.coherence.reporter; import com.tangosol.net.CacheFactory; import com.tangosol.net.management.MBeanHelper; import com.tangosol.run.xml.XmlDocument; import com.tangosol.run.xml.XmlElement; import com.tangosol.run.xml.XmlHelper; import com.tangosol.util.Base; import com.tangosol.util.TaskDaemon; import java.io.File; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.concurrent.atomic.AtomicReference; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import com.oracle.coherence.common.base.Blocking; import javax.management.openmbean.CompositeDataSupport; import javax.management.openmbean.CompositeType; import javax.management.openmbean.OpenDataException; import javax.management.openmbean.OpenType; import javax.management.openmbean.SimpleType; import javax.management.openmbean.TabularData; import javax.management.openmbean.TabularDataSupport; import javax.management.openmbean.TabularType; /** * Management class to continually run the reporting process. * * * @author ew 2008.01.28 * @since Coherence 3.4 */ public class ReportBatch extends Base implements Runnable, ReportControl { //----- constructor ---------------------------------------------------- /** * Default constructor. */ public ReportBatch() { } /** * Continually execute a group of reports from the configuration file. 
* <pre> * Example: * java com.tangosol.coherence.reporter.ReportBatch config-file * </pre> * Note: to run a group of reports programmaticly, one could to do the following: * <code> * new ReportBatch().start(); * </code> */ public static void main(String[] asArg) { if (asArg.length == 0) { showUsage(); return; } String sFile = asArg[0]; System.setProperty("coherence.management.report.configuration", sFile); System.setProperty("coherence.management.report.autostart", "true"); System.setProperty("coherence.management.report.distributed", "false"); while (true) { CacheFactory.ensureCluster(); try { Blocking.sleep(5000); } catch (InterruptedException e) { Thread.interrupted(); break; } } } //----- ReportBatch methods ---------------------------------------------- /** * Run the report batch. */ public void run() { long ldtStart = System.currentTimeMillis(); ReportBatch model = this; Map map = m_mapReporters; Reporter reporter; // refresh settings if they have changed model.setCurrentBatch(model.getCurrentBatch() + 1); long nBatchId = model.getCurrentBatch(); String[] aReport = model.getReports(); XmlElement[] aParam = model.getParams(); String sPath = m_sOutputDir; long cReports = aReport.length; if (model.setState(/*sExpectState*/ STATE_WAITING, /*sNewState*/ STATE_RUNNING) || model.setState(/*sExpectState*/ STATE_STARTED, /*sNewState*/ STATE_RUNNING)) { model.setLastExecutionMillis(System.currentTimeMillis()); for (int i = 0; i < cReports; i++) { Integer nKey = Integer.valueOf(i); String sDefFile = aReport[i]; XmlElement xmlParam = aParam[i]; reporter = (Reporter) map.get(nKey); if (reporter == null) { reporter = new Reporter(); map.put(nKey, reporter); } reporter.setDateFormat(m_dateFormat); model.setLastReport(sDefFile); reporter.run(sDefFile, sPath, nBatchId, xmlParam, ReportBatch.class.getClassLoader()); } updateStats(ldtStart); model.setState(/*sExpectState*/ STATE_RUNNING, /*sNewState*/ STATE_WAITING); } } /** * Output usage instructions. 
*/ public static void showUsage() { out(); out("java com.tangosol.coherence.reporter.ReportBatch <config-file>"); out(); out("command option descriptions:"); out("\t<config-file> the file containing the report configuration XML"); out(); } // ----- ReportControl methods ------------------------------------------- /** * Obtain the daemon for the Reporter task. * * @return the daemon the Reporter is executing */ public TaskDaemon getDaemon() { return m_daemon; } /** * Set the daemon for the Reporter task. * * @param daemon the daemon the Reporter is executing */ public void setDaemon(TaskDaemon daemon) { m_daemon = daemon; } /** * Check to see if the execution thread is running. * * @return the thread running the reporter */ public boolean isRunning() { return m_fRun; } /** * Set the last report executed. * * @param sLastReport the last Report Executed */ public void setLastReport(String sLastReport) { m_sLastReport = sLastReport; } /** * Set the list of reports in the execution list. * * @param asReports the report execution list */ public void setReports(String[] asReports) { m_asReports = asReports; } /** * Set the last time a report was executed. * * @param ldtTime the last time a reported executed as a long */ public void setLastExecutionMillis(long ldtTime) { m_ldtLastExecutionMillis = ldtTime; } /** * Set the last time a report was executed. * * @return the last time a reported executed as a long */ public long getLastExecutionMillis() { return m_ldtLastExecutionMillis; } /** * Set the state of the reporter. * * @param sState the state of the reporter */ public void setState(String sState) { m_refState.set(sState); } /** * Compare and Set the state of the reporter. 
* * @param sExpectState the expected state of the reporter * @param sNewState the new state to set * * @return true if the state is set successfully */ public boolean setState(String sExpectState, String sNewState) { return m_refState.compareAndSet(sExpectState, sNewState); } /** * Get the batch configuration XML that conforms to batch-config.xml. * * @return the batch configuration XML */ public XmlDocument getXml() { return m_xml; } /** * Set the batch configuration XML that conforms to batch-config.xml. * * @param xml the XML configuration for the Reporter */ public void setXml(XmlDocument xml) { m_xml = xml; } /** * Convert the batch configuration XML to an array for the MBean. * * @param xmlReports the batch configuration report list * * @return the array of report configuration file names */ private String[] makeReportArray(XmlElement xmlReports) { List listReports = xmlReports.getElementList(); String[] asReports = new String[listReports.size()]; XmlElement[] axmlParam = new XmlElement[listReports.size()]; int c = 0; for (Iterator i = listReports.iterator(); i.hasNext();) { XmlElement o = (XmlElement)i.next(); asReports[c] = o.getSafeElement(TAG_LOCATION).getString(); axmlParam[c] = o.getElement(TAG_PARAMS); c++; } m_aParams = axmlParam; return asReports; } //----- ReportControl Interface ---------------------------------------- /** * {@inheritDoc} */ public long getCurrentBatch() { return m_nCurrentBatch; } /** * {@inheritDoc} */ public void setCurrentBatch(long nNewBatch) { m_nCurrentBatch = nNewBatch; } /** * {@inheritDoc} */ public long getIntervalSeconds() { return m_nInterval / 1000; } /** * {@inheritDoc} */ public String getOutputPath() { return m_sOutputDir == null ? 
"" : new File(m_sOutputDir).getAbsolutePath(); } /** * {@inheritDoc} */ public void setOutputPath(String sPath) { m_sOutputDir = sPath; } /** * {@inheritDoc} */ public void setIntervalSeconds(long nInterval) { m_nInterval = nInterval * 1000; } /** * {@inheritDoc} */ public String getConfigFile() { return m_sConfigFile; } /** * {@inheritDoc} */ public String getState() { return m_refState.get(); } /** * {@inheritDoc} */ public boolean isAutoStart() { return getDependencies().isAutoStart(); } /** * {@inheritDoc} */ public void stop() { MBeanHelper.checkReadOnly("stop"); if (!getState().equals(STATE_ERROR)) { synchronized (this) { if (isRunning()) { m_fRun = false; setState(STATE_STOPPING); getDaemon().stop(); setState(STATE_STOPPED); setDaemon(null); Base.log("Management Reporting - Stopped"); } } } } /** * {@inheritDoc} */ public void start() { MBeanHelper.checkReadOnly("start"); if (getState().equals(STATE_ERROR)) { Base.log("Management Reporting - " + "An unrecoverable error has occurred. Reporter not started."); } else { synchronized (this) { TaskDaemon daemon = getDaemon(); if (m_daemon == null && m_sConfigFile != null) { ReportBatch oReport = this; daemon = new TaskDaemon("Reporter"); daemon.schedulePeriodicTask(oReport, System.currentTimeMillis() + getIntervalSeconds() * 1000, getIntervalSeconds() * 1000); Base.log("Management Reporting - Started"); daemon.start(); setState(STATE_STARTED); m_fRun = true; } setDaemon(daemon); } } } /** * {@inheritDoc} */ public String[] getReports() { return m_asReports; } /** * {@inheritDoc} */ public XmlElement[] getParams() { return m_aParams; } /** * Set the array of XML elements for the initialization parameters. 
* * @param aXml array of xml elements */ public void setParams(XmlElement[] aXml) { m_aParams = aXml; } /** * {@inheritDoc} */ public void setConfigFile(String sInputFilename) { try { synchronized (this) { m_mapReporters = new HashMap(); m_sConfigFile = sInputFilename; XmlDocument xml = XmlHelper.loadFileOrResource( sInputFilename, "Reporter configuration", ReportBatch.class.getClassLoader()); XmlHelper.replaceSystemProperties(xml, "system-property"); setXml(xml); setOutputPath(xml.getSafeElement(TAG_DIR).getString("")); setIntervalSeconds(Base.parseTime(xml.getSafeElement(TAG_FREQ).getString(DEFAULT_FREQ))/1000); m_asReports = makeReportArray(xml.getSafeElement(TAG_LIST)); } } catch (Exception e) // FileNotFoundException { setState(STATE_ERROR); Base.log("Failed to start Reporter " + e); m_asReports = new String[0]; } } /** * {@inheritDoc} */ public void runReport(String sReportFile) { if (!getState().equals(STATE_ERROR)) { new Reporter().run(sReportFile, m_sOutputDir, m_nCurrentBatch, null, ReportBatch.class.getClassLoader()); } } /** * {@inheritDoc} */ public TabularData runTabularReport(String sReportFile) { boolean fURI = Reporter.isURI(sReportFile); // Reporters for URI based reports are cached. TabularReportRunner runner = fURI ? 
f_mapReporter.get(sReportFile) : new TabularReportRunner(sReportFile, fURI); if (runner == null) { runner = new TabularReportRunner(sReportFile, fURI); f_mapReporter.put(sReportFile, runner); } return runner.runTabularReport(); } /** * {@inheritDoc} */ public TabularData runTabularGroupReport(String sReportName, Map<String, String> mapXmlReports) { TabularReportRunner runner = new TabularReportRunner(sReportName, mapXmlReports); return runner.runTabularReport(); } /** * {@inheritDoc} */ public String getLastReport() { return m_sLastReport; } /** * {@inheritDoc} */ public Date getLastExecuteTime() { return new Date(getLastExecutionMillis()); } /** * {@inheritDoc} */ public long getRunLastMillis() { return m_lastRuntimeMillis; } /** * {@inheritDoc} */ public long getRunMaxMillis() { return this.m_maxRuntimeMillis; } /** * {@inheritDoc} */ public double getRunAverageMillis() { return (m_cExecutionCount == 0) ? 0.0 : this.m_totalRuntimeMillis / m_cExecutionCount; } /** * {@inheritDoc} */ public void resetStatistics() { m_lastRuntimeMillis = 0; m_maxRuntimeMillis = 0; m_cExecutionCount = 0; m_totalRuntimeMillis = 0; } /** * {@inheritDoc} */ public boolean isCentralized() { return getDependencies().isDistributed(); } // ----- helper methods -------------------------------------------------- protected void updateStats(long ldtStart) { long lRuntime = System.currentTimeMillis() - ldtStart; m_cExecutionCount ++; m_maxRuntimeMillis = (m_maxRuntimeMillis < lRuntime) ? 
lRuntime : m_maxRuntimeMillis; m_totalRuntimeMillis += lRuntime; m_lastRuntimeMillis = lRuntime; } /** * {@inheritDoc} */ public void setDependencies(Dependencies dps) { if (getDependencies() == null) { m_dependencies = dps = new DefaultDependencies(dps).validate(); } else { throw new IllegalStateException("Reporter dependencies cannot be reset"); } setConfigFile(dps.getConfigFile()); String sTimezone = dps.getTimeZone(); String sTimeStampFormat = dps.getDateFormat(); m_dateFormat = new SimpleDateFormat(sTimeStampFormat); if (!sTimezone.isEmpty()) { m_dateFormat.setTimeZone(getTimeZone(sTimezone)); } } /** * {@inheritDoc} */ public Dependencies getDependencies() { return m_dependencies; } // ----- inner interface -------------------------------------------------- /** * The interface used to provide reporter with its external dependencies. */ public interface Dependencies { /** * Return the report configuration that contain the location for * the Reporter batch. * * @return the report configuration file */ String getConfigFile(); /** * Return the report switch for reporter. * * @return true to enable reporter */ boolean isAutoStart(); /** * Return the distributed flag that specifies whether or not to run * reporter on multiple management node. * * @return true to enable distributed reporter */ boolean isDistributed(); /** * Return the time zone for the generated reports. * * @return time zone */ String getTimeZone(); /** * Return the time stamp format for reporter. * * @return time output format */ String getDateFormat(); } // ----- inner classes -------------------------------------------------- /** * Default {@link Dependencies} implementation. */ public static class DefaultDependencies implements Dependencies { /** * Construct a DefaultReportDependencies object. Uses default value for each dependency. 
*/ public DefaultDependencies() { this(null); } /** * Construct a DefaultReportDependencies object copying the values * from the specified ReporterDependencies object. * * @param deps the dependencies to copy */ public DefaultDependencies(Dependencies deps) { if (deps != null) { m_sConfigFile = deps.getConfigFile(); m_autoStart = deps.isAutoStart(); m_distributed = deps.isDistributed(); m_sTimezone = deps.getTimeZone(); m_sDateFormat = deps.getDateFormat(); } } /** * {@inheritDoc} */ public String getConfigFile() { return m_sConfigFile; } /** * Set the report configuration file. * * @param sConfFile the report configuration file * * @return this object */ public DefaultDependencies setConfigFile(String sConfFile) { m_sConfigFile = sConfFile; return this; } /** * {@inheritDoc} */ public boolean isAutoStart() { return m_autoStart; } /** * Set the reporter switch. * * @param fAutoStart the reporter switch, true to enable reporting. * * @return this object */ public DefaultDependencies setAutoStart(boolean fAutoStart) { m_autoStart = fAutoStart; return this; } /** * {@inheritDoc} */ public boolean isDistributed() { return m_distributed; } /** * Set the distributed flag. * * @param fDistributed specify whether the reporter should run on multiple nodes. * * @return this object */ public DefaultDependencies setDistributed(boolean fDistributed) { m_distributed = fDistributed; return this; } /** * {@inheritDoc} */ public String getTimeZone() { return m_sTimezone; } /** * Set the time zone. * * @param sTimeZone time zone for the reports. * * @return this object */ public DefaultDependencies setTimeZone(String sTimeZone) { m_sTimezone = sTimeZone; return this; } /** * {@inheritDoc} */ public String getDateFormat() { return m_sDateFormat; } /** * Set the time format. * * @param sTimeFormat time stamp format for the reports. 
* * @return this object */ public DefaultDependencies setDateFormat(String sTimeFormat) { m_sDateFormat = sTimeFormat; return this; } /** * Validate the report configuration. * * @throws IllegalArgumentException if the configuration file are not valid * * @return this object */ public DefaultDependencies validate() { Base.checkNotNull(m_sConfigFile, "configuration"); return this; } // ----- data members of DefaultDependencies -------------------------------- /** * The report configuration file. */ protected String m_sConfigFile = "reports/report-group.xml"; /** * The reporter switch, true to enable reporting. */ protected boolean m_autoStart = false; /** * The distributed flag that specifies whether or not to run reporter * on multiple management node.. */ protected boolean m_distributed = false; /** * The time zone for reports. */ protected String m_sTimezone = ""; /** * The time stamp format for reports. */ protected String m_sDateFormat = "EEE MMM dd HH:mm:ss zzz yyyy"; } // ----- inner classes -------------------------------------------------- /** * TablularReportRunner runs the report and returns the data in a tabular data format. */ public class TabularReportRunner { /** * Construct a {@code TabularReportRunner} using the specified parameters. * * @param sReportOrGroup the URI or contents of either a report group or individual * report file. * @param fURI flag indicating if the report file is a URI or * XML content. */ public TabularReportRunner(String sReportOrGroup, boolean fURI) { f_fURI = fURI; XmlDocument xmlDocument = fURI ? 
XmlHelper.loadFileOrResource(sReportOrGroup, "Reporter configuration", ReportBatch.class.getClassLoader()) : XmlHelper.loadXml(sReportOrGroup); // could be a report group or a single report f_fReportGrp = xmlDocument.getName().equals("report-group"); if (f_fReportGrp) { f_sReportGroup = sReportOrGroup; f_sReport = null; List xmlReports = xmlDocument.getSafeElement(TAG_LIST).getElementList(); f_mapReports = new LinkedHashMap<String, String>(xmlReports.size()); for (Iterator iter = xmlReports.iterator(); iter.hasNext();) { // Individual reports can only be URI. Hence the value is null. f_mapReports.put(((XmlElement) iter.next()).getSafeElement(TAG_LOCATION).getString(), null); } } else { f_sReport = sReportOrGroup; f_sReportGroup = null; } } /** * Construct a {@code TabularReportRunner} using the specified parameters. * * @param sReportGroup the URI of the report group. * @param mapXmlReports map of Individual report names and their XML content. */ public TabularReportRunner(String sReportGroup, Map<String, String> mapXmlReports) { f_sReportGroup = sReportGroup; f_fURI = false; f_fReportGrp = true; f_sReport = null; // Individual reports can only be XML content. f_mapReports = new LinkedHashMap<String, String>(mapXmlReports); } /** * Run the report. * * @return the data in TabularData format */ public TabularData runTabularReport() { if (!getState().equals(STATE_ERROR)) { if (f_fReportGrp) { int cReports = f_mapReports.size(); OpenType[] aOpenTypes = new OpenType[cReports]; String[] asReportDesc = new String[cReports]; String[] asReportNames = new String[cReports]; Map<String,TabularData> mapTabulars = new HashMap<String,TabularData>(); int i = 0; for (Map.Entry<String, String> entry : f_mapReports.entrySet()) { String sReport = entry.getKey(); String sContent = entry.getValue(); // If the individual reports are URIs, then entry's value will be null and the key is the URI // otherwise entry's value is the report's xml content. 
int index = sReport.lastIndexOf('/'); String sTabularType = index < 0 ? sReport : sReport.substring(index); TabularData tabData = runSingleReport(sContent == null ? sReport : sContent, sTabularType); asReportNames[i] = sReport; mapTabulars.put(sReport, tabData); if (tabData == null) { aOpenTypes[i] = SimpleType.STRING; asReportDesc[i] = sTabularType; } else { TabularType tabType = tabData.getTabularType(); aOpenTypes[i] = tabType; asReportDesc[i] = tabType.getDescription(); } i++; } try { String sReport = f_sReportGroup; CompositeType rowType = new CompositeType(sReport, sReport, asReportNames, asReportDesc, aOpenTypes); TabularType tabType = new TabularType(sReport, sReport, rowType, asReportNames); TabularDataSupport tabDataSupport = new TabularDataSupport(tabType); tabDataSupport.put(new CompositeDataSupport(rowType, mapTabulars)); return tabDataSupport; } catch (OpenDataException e) { throw Base.ensureRuntimeException(e); } } else { return runSingleReport(f_sReport, f_fURI ? f_sReport : DEFAULT_TABULAR_TYPE_NAME); } } return null; } /** * Run an individual report. * * @param sReport the URI or the content of the report * @param sTabularType the typeName of the {@code TabularType} * * @return report data in TabularData format */ protected TabularData runSingleReport(String sReport, String sTabularType) { // reporter can handle both URI and xml content. Reporter reporter = getReporter(sReport); return reporter.run(sReport, m_sOutputDir, sTabularType, m_nCurrentBatch, null, ReportBatch.class.getClassLoader(), false, true); } /** * Get the Reporter. * * @param sReportFile Report file * * @return Reporter associated with the given report file. */ protected Reporter getReporter(String sReportFile) { Reporter reporter = f_fURI ? 
f_mapReporter.get(sReportFile) : new Reporter(); if (reporter == null) { reporter = new Reporter(); f_mapReporter.put(sReportFile, reporter); } return reporter; } // ----- data members ------------------------------------------------ /** * Report URI or content. Reporter MBeans can be invoked by passing the report * content(for example when used in JVisualVM) or the report URI. */ protected final String f_sReport; /** * Report Group URI or content. Reporter MBeans can be invoked by passing the report * group content(for example when used in JVisualVM) or the report group URI. */ protected final String f_sReportGroup; /** * Flag indicating if the report name is a URI. */ protected final boolean f_fURI; /** * Flag indicating if the report is a group report. */ protected final boolean f_fReportGrp; /** * Map of Individual report names and their XML content. */ protected Map<String, String> f_mapReports; /** * Map of Individual reports and their associated Reporter. */ protected final Map<String, Reporter> f_mapReporter = new HashMap<String, Reporter>(); } // ----- data members ---------------------------------------------------- /** * The state of the execution thread. */ private AtomicReference<String> m_refState = new AtomicReference<>(STATE_STOPPED); /** * The file name of the last report executed. */ private String m_sLastReport; /** * The output path of the data files. */ private String m_sOutputDir; /** * The batch configuration filename. */ private String m_sConfigFile; /** * The current execution batch. */ private long m_nCurrentBatch; /** * Flag to determine if the process should be running (false stops execution * thread. */ private boolean m_fRun; /** * Array of report configuration file names in the batch. */ private String[] m_asReports; /** * Number of milliseconds to wait between batch executions. */ private long m_nInterval; /** * The batch configuration XML. */ private XmlDocument m_xml; /** * The report execution daemon. 
*/ protected TaskDaemon m_daemon; /** * The start date and time of the last batch execution. */ private long m_ldtLastExecutionMillis; /** * The parameters for each report. */ private XmlElement[] m_aParams; /** * The map of Reporter instances to running in the batch. */ protected Map m_mapReporters = new HashMap(); /** * The last batch execution time in milliseconds. */ protected long m_lastRuntimeMillis; /** * The maximum runtime in milliseconds. */ protected long m_maxRuntimeMillis; /** * The total number of executions. */ protected long m_cExecutionCount; /** * The total runtime in milliseconds. */ protected long m_totalRuntimeMillis; /** * Report dependencies. */ private Dependencies m_dependencies; /** * Date format. */ protected DateFormat m_dateFormat; /** * Map of Reports and their associated TablularReportRunner. Reporters for URI * based reports are cached. */ protected Map<String, TabularReportRunner> f_mapReporter = new HashMap<String, TabularReportRunner>(); // ----- Constants ------------------------------------------------------ /** * The execution thread has been started. */ public static final String STATE_STARTED = "Started"; /** * The controlling thread is attempting to stop the execution thread. */ public static final String STATE_STOPPING = "Stopping"; /** * The execution thread is stopped. */ public static final String STATE_STOPPED = "Stopped"; /** * The execution thread is waiting for the frequency time before running. */ public static final String STATE_WAITING = "Sleeping"; /** * The execution thread is running a report. */ public static final String STATE_RUNNING = "Running"; /** * The Reporter Batch has received an Error and can not continue. */ public static final String STATE_ERROR = "Error"; /** * The frequency which the report batch will execute. */ public static final String TAG_FREQ = "frequency"; /** * The tag in the xml which contains the location of the report configuration. 
*/ public static final String TAG_LOCATION = "location"; /** * The tag in the xml which contains the report configuration pararameters. */ public static final String TAG_PARAMS = "init-params"; /** * The tag in the xml which contains the output path. */ public static final String TAG_DIR = "output-directory"; /** * The tag in the xml which contains the report-list. */ public static final String TAG_LIST = "report-list"; /** * The value of the default frequency if frequency is not specified. */ public static final String DEFAULT_FREQ = "60s"; /** * The constants to be used as the typeName of the {@code TabularType} in non URL based * reports i.e. where report XML's are passed as the content of the Reporter invocation. */ public static final String DEFAULT_TABULAR_TYPE_NAME = "coherence-report.xml"; }
apache/ofbiz-framework
35,054
framework/service/src/main/java/org/apache/ofbiz/service/engine/EntityAutoEngine.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ofbiz.service.engine; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.GeneralException; import org.apache.ofbiz.base.util.UtilDateTime; import org.apache.ofbiz.base.util.UtilMisc; import org.apache.ofbiz.base.util.UtilProperties; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.entity.GenericValue; import org.apache.ofbiz.entity.finder.PrimaryKeyFinder; import org.apache.ofbiz.entity.model.ModelEntity; import org.apache.ofbiz.entity.model.ModelField; import org.apache.ofbiz.entity.model.ModelUtil; import org.apache.ofbiz.entity.util.EntityQuery; import org.apache.ofbiz.security.SecuredUpload; import org.apache.ofbiz.service.DispatchContext; import org.apache.ofbiz.service.GenericServiceException; import org.apache.ofbiz.service.ModelParam; import org.apache.ofbiz.service.ModelService; import org.apache.ofbiz.service.ServiceDispatcher; import org.apache.ofbiz.service.ServiceUtil; /** * Standard Java Static Method Service 
Engine */ public final class EntityAutoEngine extends GenericAsyncEngine { private static final String MODULE = EntityAutoEngine.class.getName(); private static final String RESOURCE = "ServiceErrorUiLabels"; private static final List<String> AVAIL_INVOKE_ACTION_NAMES = UtilMisc.toList("create", "update", "delete", "expire"); public EntityAutoEngine(ServiceDispatcher dispatcher) { super(dispatcher); } /** * @see org.apache.ofbiz.service.engine.GenericEngine#runSyncIgnore(java.lang.String, org.apache.ofbiz.service.ModelService, java.util.Map) */ @Override public void runSyncIgnore(String localName, ModelService modelService, Map<String, Object> context) throws GenericServiceException { runSync(localName, modelService, context); } /** * @see org.apache.ofbiz.service.engine.GenericEngine#runSync(java.lang.String, org.apache.ofbiz.service.ModelService, java.util.Map) */ @Override public Map<String, Object> runSync(String localName, ModelService modelService, Map<String, Object> parameters) throws GenericServiceException { // static java service methods should be: public Map<String, Object> methodName(DispatchContext dctx, Map<String, Object> context) if (!isValidText(parameters)) { return ServiceUtil.returnError("Not saved for security reason!"); } DispatchContext dctx = getDispatcher().getLocalContext(localName); Locale locale = (Locale) parameters.get("locale"); Map<String, Object> result = ServiceUtil.returnSuccess(); // check the package and method names if (modelService.getInvoke() == null || !AVAIL_INVOKE_ACTION_NAMES.contains(modelService.getInvoke())) { throw new GenericServiceException("In Service [" + modelService.getName() + "] the invoke value must be create, update, or delete for entity-auto engine"); } if (UtilValidate.isEmpty(modelService.getDefaultEntityName())) { throw new GenericServiceException("In Service [" + modelService.getName() + "] you must specify a default-entity-name for entity-auto engine"); } ModelEntity modelEntity = 
dctx.getDelegator().getModelEntity(modelService.getDefaultEntityName()); if (modelEntity == null) { throw new GenericServiceException("In Service [" + modelService.getName() + "] the specified default-entity-name [" + modelService.getDefaultEntityName() + "] is not valid"); } try { boolean allPksInOnly = true; List<String> pkFieldNameOutOnly = null; /* Check for each pk if it's : * 1. part IN * 2. or part IN and OUT, but without value but present on parameters map * Help the engine to determinate the operation to realize for a create call or validate that * any pk is present for update/delete call. */ for (ModelField pkField: modelEntity.getPkFieldsUnmodifiable()) { ModelParam pkParam = modelService.getParam(pkField.getName()); boolean pkValueInParameters = pkParam.isIn() && UtilValidate.isNotEmpty(parameters.get(pkParam.getFieldName())); if (pkParam.isOut() && !pkValueInParameters) { if (pkFieldNameOutOnly == null) { pkFieldNameOutOnly = new LinkedList<>(); allPksInOnly = false; } pkFieldNameOutOnly.add(pkField.getName()); } } switch (modelService.getInvoke()) { case "create": result = invokeCreate(dctx, parameters, modelService, modelEntity, allPksInOnly, pkFieldNameOutOnly); break; case "update": result = invokeUpdate(dctx, parameters, modelService, modelEntity, allPksInOnly); break; case "delete": result = invokeDelete(dctx, parameters, modelService, modelEntity, allPksInOnly); break; case "expire": result = invokeExpire(dctx, parameters, modelService, modelEntity, allPksInOnly); if (ServiceUtil.isSuccess(result)) { result = invokeUpdate(dctx, parameters, modelService, modelEntity, allPksInOnly); } break; default: break; } GenericValue crudValue = (GenericValue) result.get("crudValue"); if (crudValue != null) { result.remove("crudValue"); result.putAll(modelService.makeValid(crudValue, ModelService.OUT_PARAM)); } } catch (GeneralException e) { Debug.logError(e, "Error doing entity-auto operation for entity [" + modelEntity.getEntityName() + "] in service [" + 
modelService.getName() + "]: " + e.toString(), MODULE); return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "ServiceEntityAutoOperation", UtilMisc.toMap("entityName", modelEntity.getEntityName(), "serviceName", modelService.getName(), "errorString", e.toString()), locale)); } result.put(ModelService.SUCCESS_MESSAGE, ServiceUtil.makeSuccessMessage(result, "", "", "", "")); return result; } private static Map<String, Object> invokeCreate(DispatchContext dctx, Map<String, Object> parameters, ModelService modelService, ModelEntity modelEntity, boolean allPksInOnly, List<String> pkFieldNameOutOnly) throws GeneralException { Locale locale = (Locale) parameters.get("locale"); GenericValue newEntity = dctx.getDelegator().makeValue(modelEntity.getEntityName()); boolean isSinglePk = modelEntity.getPksSize() == 1; boolean isDoublePk = modelEntity.getPksSize() == 2; Iterator<ModelField> pksIter = modelEntity.getPksIterator(); ModelField singlePkModeField = isSinglePk ? pksIter.next() : null; ModelParam singlePkModelParam = isSinglePk ? modelService.getParam(singlePkModeField.getName()) : null; boolean isSinglePkIn = isSinglePk ? singlePkModelParam.isIn() : false; boolean isSinglePkOut = isSinglePk ? 
singlePkModelParam.isOut() : false; ModelParam doublePkPrimaryInParam = null; ModelParam doublePkSecondaryOutParam = null; ModelField doublePkSecondaryOutField = null; if (isDoublePk) { ModelField firstPkField = pksIter.next(); ModelParam firstPkParam = modelService.getParam(firstPkField.getName()); ModelField secondPkField = pksIter.next(); ModelParam secondPkParam = modelService.getParam(secondPkField.getName()); if (firstPkParam.isIn() && secondPkParam.isOut()) { doublePkPrimaryInParam = firstPkParam; doublePkSecondaryOutParam = secondPkParam; doublePkSecondaryOutField = secondPkField; } else if (firstPkParam.isOut() && secondPkParam.isIn()) { doublePkPrimaryInParam = secondPkParam; doublePkSecondaryOutParam = firstPkParam; doublePkSecondaryOutField = firstPkField; // } else { // we don't have an IN and an OUT... so do nothing and leave them null } } if (isSinglePk && isSinglePkOut && !isSinglePkIn) { /* **** primary sequenced primary key **** * <auto-attributes include="pk" mode="OUT" optional="false"/> * <make-value entity-name="Example" value-name="newEntity"/> <sequenced-id-to-env sequence-name="Example" env-name="newEntity.exampleId"/> <!-- get the next sequenced ID --> <field-to-result field-name="newEntity.exampleId" result-name="exampleId"/> <set-nonpk-fields map-name="parameters" value-name="newEntity"/> <create-value value-name="newEntity"/> * */ String sequencedId = dctx.getDelegator().getNextSeqId(modelEntity.getEntityName()); newEntity.set(singlePkModeField.getName(), sequencedId); } else if (isSinglePk && isSinglePkOut && isSinglePkIn) { /* **** primary sequenced key with optional override passed in **** * <auto-attributes include="pk" mode="INOUT" optional="true"/> * <make-value value-name="newEntity" entity-name="Product"/> <set-nonpk-fields map-name="parameters" value-name="newEntity"/> <set from-field="parameters.productId" field="newEntity.productId"/> <if-empty field="newEntity.productId"> <sequenced-id-to-env sequence-name="Product" 
env-name="newEntity.productId"/> <else> <check-id field-name="productId" map-name="newEntity"/> <check-errors/> </else> </if-empty> <field-to-result field-name="productId" map-name="newEntity" result-name="productId"/> <create-value value-name="newEntity"/> * */ Object pkValue = parameters.get(singlePkModelParam.getName()); if (UtilValidate.isEmpty(pkValue)) { pkValue = dctx.getDelegator().getNextSeqId(modelEntity.getEntityName()); } else { // pkValue passed in, check and if there are problems return an error if (pkValue instanceof String) { StringBuffer errorDetails = new StringBuffer(); if (!UtilValidate.isValidDatabaseId((String) pkValue, errorDetails)) { return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "ServiceParameterValueNotValid", UtilMisc.toMap("parameterName", singlePkModelParam.getName(), "errorDetails", errorDetails), locale)); } } } newEntity.set(singlePkModeField.getName(), pkValue); GenericValue lookedUpValue = PrimaryKeyFinder.runFind(modelEntity, parameters, dctx.getDelegator(), false, true, null, null); if (lookedUpValue != null) { return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "ServiceValueFound", UtilMisc.toMap("pkFields", newEntity.getPkShortValueString()), locale)); } } else if (isDoublePk && doublePkPrimaryInParam != null && doublePkSecondaryOutParam != null) { /* **** secondary sequenced primary key **** * <auto-attributes include="pk" mode="IN" optional="false"/> <override name="exampleItemSeqId" mode="OUT"/> <!-- make this OUT rather than IN, we will automatically generate the * next sub-sequence ID --> * <make-value entity-name="ExampleItem" value-name="newEntity"/> <set-pk-fields map-name="parameters" value-name="newEntity"/> <make-next-seq-id value-name="newEntity" seq-field-name="exampleItemSeqId"/> <!-- this finds the next sub-sequence ID --> <field-to-result field-name="newEntity.exampleItemSeqId" result-name="exampleItemSeqId"/> <set-nonpk-fields map-name="parameters" value-name="newEntity"/> 
<create-value value-name="newEntity"/> */ newEntity.setPKFields(parameters, true); dctx.getDelegator().setNextSubSeqId(newEntity, doublePkSecondaryOutField.getName(), 5, 1); } else if (allPksInOnly) { /* **** plain specified primary key **** * <auto-attributes include="pk" mode="IN" optional="false"/> * <make-value entity-name="Example" value-name="newEntity"/> <set-pk-fields map-name="parameters" value-name="newEntity"/> <set-nonpk-fields map-name="parameters" value-name="newEntity"/> <create-value value-name="newEntity"/> * */ newEntity.setPKFields(parameters, true); //with all pks present on parameters, check if the entity is not already exists. GenericValue lookedUpValue = PrimaryKeyFinder.runFind(modelEntity, parameters, dctx.getDelegator(), false, true, null, null); if (lookedUpValue != null) { return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "ServiceValueFound", UtilMisc.toMap("pkFields", newEntity.getPkShortValueString()), locale)); } } else { /* We haven't all Pk and their are 3 or more, now check if isn't a associate entity with own sequence <set-pk-fields map="parameters" value-field="newEntity"/> <sequenced-id sequence-name="ExempleItemAssoc" field="newEntity.exempleItemAssocId"/> <create-value value-field="newEntity"/> */ if (pkFieldNameOutOnly != null && pkFieldNameOutOnly.size() == 1) { newEntity.setPKFields(parameters, true); String pkFieldName = pkFieldNameOutOnly.get(0); //if it's a fromDate, don't update it now, it's will be done next step if (!"fromDate".equals(pkFieldName)) { String pkValue = dctx.getDelegator().getNextSeqId(modelEntity.getEntityName()); newEntity.set(pkFieldName, pkValue); } } else { throw new GenericServiceException("In Service [" + modelService.getName() + "] which uses the entity-auto engine with the create invoke option: " + "could not find a valid combination of primary key settings to do a known create operation; options include: " + "1. a single OUT pk for primary auto-sequencing, " + "2. 
a single INOUT pk for primary auto-sequencing with optional override, " + "3. a 2-part pk with one part IN (existing primary pk) and one part OUT (the secondary pk to sub-sequence), " + "4. a N-part pk with N-1 part IN and one party OUT only (missing pk is a sub-sequence mainly for entity assoc), " + "5. all pk fields are IN for a manually specified primary key"); } } // handle the case where there is a fromDate in the pk of the entity, and it is optional or undefined in the service def, // populate automatically ModelField fromDateField = modelEntity.getField("fromDate"); if (fromDateField != null && fromDateField.getIsPk()) { ModelParam fromDateParam = modelService.getParam("fromDate"); if (fromDateParam == null || parameters.get("fromDate") == null) { newEntity.set("fromDate", UtilDateTime.nowTimestamp()); } } newEntity.setNonPKFields(parameters, true); if (modelEntity.getField("createdDate") != null) { newEntity.set("createdDate", UtilDateTime.nowTimestamp()); if (modelEntity.getField("createdByUserLogin") != null) { GenericValue userLogin = (GenericValue) parameters.get("userLogin"); if (userLogin != null) { newEntity.set("createdByUserLogin", userLogin.get("userLoginId")); if (modelEntity.getField("lastModifiedByUserLogin") != null) { newEntity.set("lastModifiedByUserLogin", userLogin.get("userLoginId")); } else if (modelEntity.getField("changedByUserLogin") != null) { newEntity.set("changedByUserLogin", userLogin.get("userLoginId")); } } } if (modelEntity.getField("lastModifiedDate") != null) { newEntity.set("lastModifiedDate", UtilDateTime.nowTimestamp()); } else if (modelEntity.getField("changedDate") != null) { newEntity.set("changedDate", UtilDateTime.nowTimestamp()); } } if (modelEntity.getField("changeByUserLoginId") != null) { GenericValue userLogin = (GenericValue) parameters.get("userLogin"); if (userLogin != null) { newEntity.set("changeByUserLoginId", userLogin.get("userLoginId")); } else { throw new GenericServiceException("You call a creation on 
entity that require the userLogin to track the activity," + " please control that your service definition has auth='true'"); } //Oh changeByUserLoginId detected, check if an EntityStatus concept if (modelEntity.getEntityName().endsWith("Status")) { if (modelEntity.getField("statusDate") != null && parameters.get("statusDate") == null) { newEntity.set("statusDate", UtilDateTime.nowTimestamp()); //if a statusEndDate is present, resolve the last EntityStatus to store this value on the previous element if (modelEntity.getField("statusEndDate") != null) { ModelEntity relatedEntity = dctx.getDelegator().getModelEntity(modelEntity.getEntityName().replaceFirst("Status", "")); if (relatedEntity != null) { Map<String, Object> conditionRelatedPkFieldMap = new HashMap<>(); for (String pkRelatedField : relatedEntity.getPkFieldNames()) { conditionRelatedPkFieldMap.put(pkRelatedField, parameters.get(pkRelatedField)); } GenericValue previousStatus = EntityQuery.use(newEntity.getDelegator()).from(modelEntity.getEntityName()) .where(conditionRelatedPkFieldMap).orderBy("-statusDate").queryFirst(); if (previousStatus != null) { previousStatus.put("statusEndDate", newEntity.get("statusDate")); previousStatus.store(); } } } } } } newEntity.create(); Map<String, Object> result = ServiceUtil.returnSuccess(UtilProperties.getMessage("ServiceUiLabels", "EntityCreatedSuccessfully", UtilMisc.toMap("label", modelEntity.getTitle()), locale)); result.put("crudValue", newEntity); return result; } private static Map<String, Object> invokeUpdate(DispatchContext dctx, Map<String, Object> parameters, ModelService modelService, ModelEntity modelEntity, boolean allPksInOnly) throws GeneralException { Locale locale = (Locale) parameters.get("locale"); Map<String, Object> localContext = new HashMap<>(); localContext.put("parameters", parameters); Map<String, Object> result = ServiceUtil.returnSuccess(); /* <auto-attributes include="pk" mode="IN" optional="false"/> <entity-one entity-name="ExampleItem" 
value-name="lookedUpValue"/> <set-nonpk-fields value-name="lookedUpValue" map-name="parameters"/> <store-value value-name="lookedUpValue"/> */ // check to make sure that all primary key fields are defined as IN attributes if (!allPksInOnly) { throw new GenericServiceException("In Service [" + modelService.getName() + "] which uses the entity-auto engine with the update" + " invoke option not all pk fields have the mode IN"); } GenericValue lookedUpValue = PrimaryKeyFinder.runFind(modelEntity, parameters, dctx.getDelegator(), false, true, null, null); if (lookedUpValue == null) { return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "ServiceValueNotFound", locale)); } localContext.put("lookedUpValue", lookedUpValue); // populate the oldStatusId or oldItemStatusId out if there is a service parameter for it, and before we do the set non-pk fields /* <auto-attributes include="pk" mode="IN" optional="false"/> <attribute name="oldStatusId" type="String" mode="OUT" optional="false"/> <field-to-result field-name="lookedUpValue.statusId" result-name="oldStatusId"/> OR <auto-attributes include="pk" mode="IN" optional="false"/> <attribute name="oldItemStatusId" type="String" mode="OUT" optional="false"/> <field-to-result field-name="lookedUpValue.itemStatusId" result-name="oldItemStatusId"/> */ for (String statusField: UtilMisc.toList("statusId", "itemStatusId")) { ModelParam statusIdParam = modelService.getParam(statusField); ModelField statusIdModelField = modelEntity.getField(statusField); String oldStatusField = "old" + ModelUtil.upperFirstChar(statusField); ModelParam oldStatusIdParam = modelService.getParam(oldStatusField); if (statusIdParam != null && statusIdParam.isIn() && oldStatusIdParam != null && oldStatusIdParam.isOut() && statusIdModelField != null) { result.put(oldStatusField, lookedUpValue.get(statusField)); } // do the StatusValidChange check /* <if-compare-field field="lookedUpValue.statusId" operator="not-equals" 
to-field="parameters.statusId"> <!-- if the record exists there should be a statusId, but just in case make it so it won't blow up --> <if-not-empty field="lookedUpValue.statusId"> <!-- if statusId change is not in the StatusValidChange list, complain... --> <entity-one entity-name="StatusValidChange" value-name="statusValidChange" auto-field-map="false"> <field-map field-name="statusId" env-name="lookedUpValue.statusId"/> <field-map field-name="statusIdTo" env-name="parameters.statusId"/> </entity-one> <if-empty field="statusValidChange"> <!-- no valid change record found? return an error... --> <add-error><fail-property resource="CommonUiLabels" property="CommonErrorNoStatusValidChange"/></add-error> <check-errors/> </if-empty> </if-not-empty> </if-compare-field> */ String statusIdParamValue = (String) parameters.get(statusField); if (statusIdParam != null && statusIdParam.isIn() && UtilValidate.isNotEmpty(statusIdParamValue) && statusIdModelField != null) { String lookedUpStatusId = (String) lookedUpValue.get(statusField); if (UtilValidate.isNotEmpty(lookedUpStatusId) && !statusIdParamValue.equals(lookedUpStatusId)) { // there was an old status, and in this call we are trying to change it, so do the StatusValidChange check GenericValue statusValidChange = dctx.getDelegator().findOne("StatusValidChange", true, "statusId", lookedUpStatusId, "statusIdTo", statusIdParamValue); if (statusValidChange == null) { // uh-oh, no valid change... 
return ServiceUtil.returnError(UtilProperties.getMessage("CommonUiLabels", "CommonErrorNoStatusValidChange", localContext, locale)); } } } // NOTE: nothing here to maintain the status history, that should be done with a custom service called by SECA rule } lookedUpValue.setNonPKFields(parameters, true); if (modelEntity.getField("lastModifiedDate") != null || modelEntity.getField("changedDate") != null) { if (modelEntity.getField("lastModifiedDate") != null) { lookedUpValue.set("lastModifiedDate", UtilDateTime.nowTimestamp()); } else { lookedUpValue.set("changedDate", UtilDateTime.nowTimestamp()); } if (modelEntity.getField("lastModifiedByUserLogin") != null || modelEntity.getField("changedByUserLogin") != null) { GenericValue userLogin = (GenericValue) parameters.get("userLogin"); if (userLogin != null) { if (modelEntity.getField("lastModifiedByUserLogin") != null) { lookedUpValue.set("lastModifiedByUserLogin", userLogin.get("userLoginId")); } else { lookedUpValue.set("changedByUserLogin", userLogin.get("userLoginId")); } } } } if (modelEntity.getField("changeByUserLoginId") != null) { if (modelEntity.getEntityName().endsWith("Status")) { //Oh update on EntityStatus concept detected ... 
not possible, return invalid request throw new GenericServiceException("You call a updating operation on entity that track the activity, sorry I can't do that," + "please amazing developer check your service definition;)"); } GenericValue userLogin = (GenericValue) parameters.get("userLogin"); if (userLogin != null) { lookedUpValue.set("changeByUserLoginId", userLogin.get("userLoginId")); } else { throw new GenericServiceException("You call a updating operation on entity that track the activity, sorry I can't do that," + "please amazing developer check your service definition;)"); } } lookedUpValue.store(); result.put("crudValue", lookedUpValue); result.put(ModelService.SUCCESS_MESSAGE, UtilProperties.getMessage("ServiceUiLabels", "EntityUpdatedSuccessfully", UtilMisc.toMap("label", modelEntity.getTitle()), locale)); return result; } private static Map<String, Object> invokeDelete(DispatchContext dctx, Map<String, Object> parameters, ModelService modelService, ModelEntity modelEntity, boolean allPksInOnly) throws GeneralException { Locale locale = (Locale) parameters.get("locale"); /* <auto-attributes include="pk" mode="IN" optional="false"/> <entity-one entity-name="ExampleItem" value-name="lookedUpValue"/> <remove-value value-name="lookedUpValue"/> */ // check to make sure that all primary key fields are defined as IN attributes if (!allPksInOnly) { throw new GenericServiceException("In Service [" + modelService.getName() + "] which uses the entity-auto engine with the delete" + "invoke option not all pk fields have the mode IN"); } if (modelEntity.getField("changeByUserLoginId") != null) { if (modelEntity.getEntityName().endsWith("Status")) { //Oh update on EntityStatus concept detected ... 
not possible, return invalid request throw new GenericServiceException("You call a deleting operation on entity that track the activity, sorry I can't do that," + "please amazing developer check your service definition;)"); } } GenericValue lookedUpValue = PrimaryKeyFinder.runFind(modelEntity, parameters, dctx.getDelegator(), false, true, null, null); if (lookedUpValue == null) { return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "ServiceValueNotFoundForRemove", locale)); } lookedUpValue.remove(); Map<String, Object> result = ServiceUtil.returnSuccess(UtilProperties.getMessage("ServiceUiLabels", "EntityDeletedSuccessfully", UtilMisc.toMap("label", modelEntity.getTitle()), locale)); return result; } /** * Analyse the entity, service and parameter to resolve the field to update with what value * @param dctx * @param parameters * @param modelService * @param modelEntity * @param allPksInOnly * @return * @throws GeneralException */ private static Map<String, Object> invokeExpire(DispatchContext dctx, Map<String, Object> parameters, ModelService modelService, ModelEntity modelEntity, boolean allPksInOnly) throws GeneralException { Locale locale = (Locale) parameters.get("locale"); List<String> fieldThruDates = new LinkedList<>(); boolean thruDatePresent = false; String fieldDateNameIn = null; // check to make sure that all primary key fields are defined as IN attributes if (!allPksInOnly) { throw new GenericServiceException("In Service [" + modelService.getName() + "] which uses the entity-auto engine with the update" + "invoke option not all pk fields have the mode IN"); } GenericValue lookedUpValue = PrimaryKeyFinder.runFind(modelEntity, parameters, dctx.getDelegator(), false, true, null, null); if (lookedUpValue == null) { return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "ServiceValueNotFound", locale)); } //check if a non pk date field is present on parameters for (String fieldDateName : modelEntity.getNoPkFieldNames()) { if 
("thruDate".equals(fieldDateName)) { thruDatePresent = true; } else if (fieldDateName.endsWith("ThruDate")) { fieldThruDates.add(fieldDateName); } else if (fieldDateName.startsWith("thru") && fieldDateName.endsWith("Date")) { fieldThruDates.add(fieldDateName); } else if (fieldDateNameIn == null && modelService.getParam(fieldDateName) != null && modelEntity.getField(fieldDateName).getType().contains("date")) { fieldDateNameIn = fieldDateName; } } if (Debug.infoOn()) { Debug.logInfo(" FIELD FOUND : " + fieldDateNameIn + " ## # " + fieldThruDates + " ### " + thruDatePresent, MODULE); } if (Debug.infoOn()) { Debug.logInfo(" parameters IN : " + parameters, MODULE); } // Resolve the field without value to expire and check if the value is present on parameters or use now if (fieldDateNameIn != null) { if (parameters.get(fieldDateNameIn) == null) { parameters.put(fieldDateNameIn, UtilDateTime.nowTimestamp()); } } // Expire thruDate fields if (thruDatePresent && UtilValidate.isEmpty(lookedUpValue.getTimestamp("thruDate"))) { if (UtilValidate.isEmpty(parameters.get("thruDate"))) { parameters.put("thruDate", UtilDateTime.nowTimestamp()); } } else { for (String fieldDateName: fieldThruDates) { if (UtilValidate.isEmpty(lookedUpValue.getTimestamp(fieldDateName))) { if (UtilValidate.isEmpty(parameters.get(fieldDateName))) { parameters.put(fieldDateName, UtilDateTime.nowTimestamp()); } break; } } } if (Debug.infoOn()) { Debug.logInfo(" parameters OUT : " + parameters, MODULE); } Map<String, Object> result = ServiceUtil.returnSuccess(UtilProperties.getMessage("ServiceUiLabels", "EntityExpiredSuccessfully", UtilMisc.toMap("label", modelEntity.getTitle()), locale)); return result; } private static boolean isValidText(Map<String, Object> parameters) { // TODO maybe more parameters will be needed in future... 
String parameter = (String) parameters.get("webappPath"); if (parameter != null) { try { if (!SecuredUpload.isValidText(parameter, Collections.emptyList())) { Debug.logError("================== Not saved for security reason ==================", MODULE); return false; } } catch (IOException e) { Debug.logError("================== Not saved for security reason ==================", MODULE); return false; } } return true; } }
oracle/fastr
35,408
com.oracle.truffle.r.nodes/src/com/oracle/truffle/r/nodes/builtin/CastBuilder.java
/*
 * Copyright (c) 2013, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 3 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 3 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 3 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.truffle.r.nodes.builtin;

import java.util.Arrays;
import java.util.function.Function;

import com.oracle.truffle.api.CompilerAsserts;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.interop.TruffleObject;
import com.oracle.truffle.r.nodes.builtin.casts.Filter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.AndFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.CompareFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.DoubleFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.MatrixFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.MissingFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.NotFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.NullFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.OrFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.RTypeFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Filter.TypeFilter;
import com.oracle.truffle.r.nodes.builtin.casts.Mapper;
import com.oracle.truffle.r.nodes.builtin.casts.Mapper.MapByteToBoolean;
import com.oracle.truffle.r.nodes.builtin.casts.Mapper.MapDoubleToInt;
import com.oracle.truffle.r.nodes.builtin.casts.Mapper.MapToCharAt;
import com.oracle.truffle.r.nodes.builtin.casts.Mapper.MapToValue;
import com.oracle.truffle.r.runtime.MessageData;
import com.oracle.truffle.r.nodes.builtin.casts.PipelineStep;
import com.oracle.truffle.r.nodes.builtin.casts.PipelineStep.CoercionStep;
import com.oracle.truffle.r.nodes.builtin.casts.PipelineStep.FilterStep;
import com.oracle.truffle.r.nodes.builtin.casts.PipelineStep.MapIfStep;
import com.oracle.truffle.r.nodes.builtin.casts.PipelineStep.MapStep;
import com.oracle.truffle.r.nodes.builtin.casts.PipelineStep.NotNAStep;
import com.oracle.truffle.r.nodes.builtin.casts.fluent.ChainBuilder;
import com.oracle.truffle.r.nodes.builtin.casts.fluent.FindFirstNodeBuilder;
import com.oracle.truffle.r.nodes.builtin.casts.fluent.InitialPhaseBuilder;
import com.oracle.truffle.r.nodes.builtin.casts.fluent.PipelineBuilder;
import com.oracle.truffle.r.nodes.builtin.casts.fluent.PreinitialPhaseBuilder;
import com.oracle.truffle.r.runtime.nodes.unary.CastNode;
import com.oracle.truffle.r.runtime.RError;
import com.oracle.truffle.r.runtime.RInternalError;
import com.oracle.truffle.r.runtime.RRuntime;
import com.oracle.truffle.r.runtime.RType;
import com.oracle.truffle.r.runtime.builtins.RBuiltin;
import com.oracle.truffle.r.runtime.data.RArgsValuesAndNames;
import com.oracle.truffle.r.runtime.data.RComplex;
import com.oracle.truffle.r.runtime.data.RDataFactory;
import com.oracle.truffle.r.runtime.data.RDoubleVector;
import com.oracle.truffle.r.runtime.data.RFunction;
import com.oracle.truffle.r.runtime.data.RList;
import com.oracle.truffle.r.runtime.data.RMissing;
import com.oracle.truffle.r.runtime.data.RNull;
import com.oracle.truffle.r.runtime.data.RComplexVector;
import com.oracle.truffle.r.runtime.data.RIntVector;
import com.oracle.truffle.r.runtime.data.RRaw;
import com.oracle.truffle.r.runtime.data.model.RAbstractListVector;
import com.oracle.truffle.r.runtime.data.RLogicalVector;
import com.oracle.truffle.r.runtime.data.RRawVector;
import com.oracle.truffle.r.runtime.data.RStringVector;
import com.oracle.truffle.r.runtime.data.model.RAbstractVector;

// Builder of cast pipelines for builtin arguments: each argument gets an optional
// PipelineBuilder that is later materialized into a CastNode chain.
public final class CastBuilder {

    private static final PipelineBuilder[] EMPTY_BUILDERS = new PipelineBuilder[0];

    // Builtin metadata; null when this builder was created without a builtin (tests, ad-hoc use).
    private final RBuiltin builtin;
    // Parameter names from the builtin; null when builtin is null.
    private final String[] argumentNames;
    // One builder slot per argument; grows on demand only when builtin is null.
    private PipelineBuilder[] argumentBuilders;

    public CastBuilder(RBuiltin builtin) {
        // Note: if we have the builtin metadata, we pre-allocate the arrays, builtinNode != null is
        // used to determine, if the arrays are pre-allocated or if they can grow
        if (builtin == null) {
            this.builtin = null;
            argumentNames = null;
            argumentBuilders = EMPTY_BUILDERS;
        } else {
            this.builtin = builtin;
            argumentNames = builtin.parameterNames();
            argumentBuilders = new PipelineBuilder[builtin.parameterNames().length];
        }
    }

    public CastBuilder(int argumentsCount) {
        assert argumentsCount >= 0 : "argumentsCount must be non-negative";
        builtin = null;
        argumentNames = null;
        argumentBuilders = new PipelineBuilder[argumentsCount];
    }

    public CastBuilder() {
        builtin = null;
        argumentNames = null;
        argumentBuilders = EMPTY_BUILDERS;
    }

    /**
     * Returns the first cast node in the chain for each argument, if argument does not require any
     * casting, returns {@code null} as its cast node.
     */
    public CastNode[] getCasts() {
        CastNode[] castNodes = new CastNode[argumentBuilders.length];
        for (int i = 0; i < argumentBuilders.length; i++) {
            PipelineBuilder arg = argumentBuilders[i];
            if (arg != null) {
                castNodes[i] = arg.buildNode();
            }
        }
        return castNodes;
    }

    /**
     * The argument pipeline builders are needed when analyzing pipelines without instantiating
     * them. It's useful in tests.
     *
     * @return the arguments cast pipeline builders
     */
    public PipelineBuilder[] getPipelineBuilders() {
        return argumentBuilders;
    }

    // ---------------------
    // The cast-pipelines API starts here

    /**
     * Returns a builder of a cast pipeline for the given argument name.
     * <p>
     * The process of building a cast pipeline proceeds in up-to four phases:
     *
     * <pre>
     * Pre-initialPhase -&gt; (InitialPhase -&gt; (CoercedPhase -&gt; HeadPhase?)?)?
     * </pre>
     *
     * In the pre-initial phase one can configure the overall behavior of the pipeline. Currently,
     * only the default handling of {@code RNull} and {@code RMissing} values can be overridden (the
     * default behavior is explained below). The pipeline can be configured using any method of the
     * {@link PreinitialPhaseBuilder} class, e.g. {@link PreinitialPhaseBuilder#allowNull()}.
     * </p>
     * <p>
     * In the initial phase, the pipeline can be configured to filter or to coerce the input
     * argument to one of the available vector types. By using filters one can narrow down the
     * expected type of the argument. The API reflects that narrowing type in subsequent builder
     * steps. Other filters can put constrains on argument values. Filter conditions can be combined
     * by means of <code>and</code>, <code>or</code> and <code>not</code> operators. To coerce the
     * input argument to a vector one of the <code>as&lt;X&gt;Vector</code> steps is used, where
     * <code>X</code> is the element type of the given vector. Using a coercion step leads to the
     * transition to the coerced phase.
     * </p>
     * <p>
     * In the coerced phase one can specify filters examining a vector argument's properties, such
     * as the size or dimensions. The coerced phase can be followed by the head phase once a
     * {@code findFirst} step is used. In this phase the vector argument type is narrowed down to a
     * scalar value.
     * </p>
     * <p>
     * During any phase, one can add filter and mapper steps. The methods creating such steps, e.g.
     * {@link InitialPhaseBuilder#mustBe(Filter)}, usually take {@link Filter} or {@link Mapper}
     * instance. Use convenient static methods in the {@link Predef} class to construct these
     * instances.
     * </p>
     * <p>
     * Notable is the {@code mapIf} step, which allows to split the pipeline into two eventualities
     * depending on the filter condition. The second and third argument, namely
     * {@code trueBranchMapper} and {@code falseBranchMapper}, can be simple mappers, e.g.
     * {@link Predef#toBoolean()} or one can construct more complex mapping using
     * {@link Predef#chain(PipelineStep)} invocation followed by {@code with(step)} calls and
     * finished by {@code end()} invocation. The steps can be constructed using convenient methods
     * in the {@link Predef} class.
     *
     * Note: For technical reasons, when using 'find first' step by means of
     * {@link Predef#findFirst()} in this situation, it must be followed by call to
     * {@link FindFirstNodeBuilder#integerElement()} or other similar method corresponding to the
     * expected element type.
     * </p>
     *
     * <h2>Handling {@code RNull} and {@code RMissing} values</h2> By default, {@code RNull} and
     * {@code RMissing} argument values are sent to the pipeline. While most of the pipeline cast
     * nodes ignore those values and let them pass through, there are some nodes that may perform
     * some transformation of those values. For example, the {@code FindFirstNode} node replaces
     * both {@code RNull} and {@code RMissing} by the replacement values specified in the
     * corresponding <code>findFirst(repl)</code> pipeline step. Also the {@code CastToVectorNode}
     * coercion node replaces those values by an empty list provided that the
     * <code>isPreserveNonVector</code> flag is set.
     *
     * <h3>Overriding the default behavior</h3> A cast pipeline can be configured not to send
     * {@code RNull} and/or {@code RMissing} to the cast nodes forming the cast pipeline. Then those
     * values either bypass the pipeline, being eventually transformed to some constant, or an error
     * is raised.
     *
     * One can use the following steps in the pre-initial phase to override the default behavior:
     *
     * <pre>
     * allowNull()              - RNull bypasses the pipeline
     * mustNotBeNull(errorMsg)  - the error with errorMsg is raised when the input argument is RNull
     * mapNull(mapper)          - RNull is transformed using the mapper. The RNull replacement bypasses the pipeline.
     * </pre>
     *
     * Analogous methods exist for {@code RMissing}.
     */
    public PreinitialPhaseBuilder arg(String argumentName) {
        assert builtin != null : "arg(String) is only supported for builtins cast pipelines";
        return getBuilder(getArgumentIndex(argumentName), argumentName).fluent();
    }

    /**
     * @see #arg(String)
     */
    public PreinitialPhaseBuilder arg(int argumentIndex, String argumentName) {
        assert argumentNames == null || argumentIndex >= 0 && argumentIndex < argumentBuilders.length : "argument index out of range";
        assert argumentNames == null || argumentNames[argumentIndex].equals(argumentName) : "wrong argument name " + argumentName;
        return getBuilder(argumentIndex, argumentName).fluent();
    }

    /**
     * @see #arg(String)
     */
    public PreinitialPhaseBuilder arg(int argumentIndex) {
        boolean existingIndex = argumentNames != null && argumentIndex >= 0 && argumentIndex < argumentNames.length;
        String name = existingIndex ? argumentNames[argumentIndex] : null;
        return getBuilder(argumentIndex, name).fluent();
    }

    // Lazily creates (and, without builtin metadata, grows the slot array for) the builder at the given index.
    private PipelineBuilder getBuilder(int argumentIndex, String argumentName) {
        if (builtin == null && argumentIndex >= argumentBuilders.length) {
            // in the case that we have a builtin, the arguments size is known and fixed, otherwise
            // we grow the array accordingly
            argumentBuilders = Arrays.copyOf(argumentBuilders, argumentIndex + 1);
        }
        if (argumentBuilders[argumentIndex] == null) {
            argumentBuilders[argumentIndex] = new PipelineBuilder(argumentName);
        }
        return argumentBuilders[argumentIndex];
    }

    // Resolves an argument name to its position in the builtin's parameter list; fails for unknown names.
    private int getArgumentIndex(String argumentName) {
        if (builtin == null) {
            throw new IllegalArgumentException("No builtin node associated with cast builder");
        }
        for (int i = 0; i < argumentNames.length; i++) {
            if (argumentName.equals(argumentNames[i])) {
                return i;
            }
        }
        CompilerDirectives.transferToInterpreter();
        throw RInternalError.shouldNotReachHere(String.format("Argument %s not found in builtin %s", argumentName, builtin.name()));
    }

    // Static factory methods for filters, mappers and pipeline steps used in cast pipelines.
    public static final class Predef {

        @SuppressWarnings("unchecked")
        public static <T> NotFilter<T> not(Filter<? super T, ? extends T> filter) {
            NotFilter<? super T> n = filter.not();
            return (NotFilter<T>) n;
        }

        public static <T> AndFilter<T, T> and(Filter<T, T> filter1, Filter<T, T> filter2) {
            return filter1.and(filter2);
        }

        public static <T> OrFilter<T> or(Filter<T, T> filter1, Filter<T, T> filter2) {
            return filter1.or(filter2);
        }

        public static <T, R extends T> PipelineStep<T, R> mustBe(Filter<T, R> argFilter, RError.Message message, Object... messageArgs) {
            return new FilterStep<>(argFilter, new MessageData(message, messageArgs), false);
        }

        public static <T, R extends T> PipelineStep<T, R> mustBe(Filter<T, R> argFilter) {
            return new FilterStep<>(argFilter, null, false);
        }

        public static <T> PipelineStep<T, T> shouldBe(Filter<T, ? extends T> argFilter, RError.Message message, Object... messageArgs) {
            return new FilterStep<>(argFilter, new MessageData(message, messageArgs), true);
        }

        public static <T> PipelineStep<T, T> shouldBe(Filter<T, ? extends T> argFilter) {
            return new FilterStep<>(argFilter, null, true);
        }

        public static <T, R> PipelineStep<T, R> map(Mapper<T, R> mapper) {
            return new MapStep<>(mapper);
        }

        public static <T, S extends T, R> PipelineStep<T, R> mapIf(Filter<? super T, S> filter, PipelineStep<?, ?> trueBranch, PipelineStep<?, ?> falseBranch) {
            return new MapIfStep<>(filter, trueBranch, falseBranch, false);
        }

        public static <T, S extends T, R> PipelineStep<T, R> returnIf(Filter<? super T, S> filter, PipelineStep<?, ?> trueBranch, PipelineStep<?, ?> falseBranch) {
            return new MapIfStep<>(filter, trueBranch, falseBranch, true);
        }

        public static <T, S extends T, R> PipelineStep<T, R> mapIf(Filter<? super T, S> filter, PipelineStep<?, ?> trueBranch) {
            return mapIf(filter, trueBranch, null);
        }

        public static <T, S extends T, R> PipelineStep<T, R> returnIf(Filter<? super T, S> filter, PipelineStep<?, ?> trueBranch) {
            return returnIf(filter, trueBranch, null);
        }

        public static <T> ChainBuilder<T> chain(PipelineStep<T, ?> firstStep) {
            return new ChainBuilder<>(firstStep);
        }

        public static <T> PipelineStep<T, Integer> asInteger() {
            return new CoercionStep<>(RType.Integer, false);
        }

        public static <T> PipelineStep<T, RIntVector> asIntegerVector() {
            return new CoercionStep<>(RType.Integer, true);
        }

        public static <T> PipelineStep<T, RIntVector> asIntegerVectorClosure() {
            return new CoercionStep<>(RType.Integer, true, false, false, false, true, true);
        }

        public static <T> PipelineStep<T, RIntVector> asIntegerVector(boolean preserveNames, boolean preserveDimensions, boolean preserveAttributes) {
            return new CoercionStep<>(RType.Integer, true, preserveNames, preserveDimensions, preserveAttributes, true, false);
        }

        public static <T> PipelineStep<T, RIntVector> asIntegerVectorClosure(boolean preserveNames, boolean preserveDimensions, boolean preserveAttributes) {
            return new CoercionStep<>(RType.Integer, true, preserveNames, preserveDimensions, preserveAttributes, true, true);
        }

        public static <T> PipelineStep<T, Double> asDouble() {
            return new CoercionStep<>(RType.Double, false);
        }

        public static <T> PipelineStep<T, RDoubleVector> asDoubleVector() {
            return new CoercionStep<>(RType.Double, true);
        }

        public static <T> PipelineStep<T, RDoubleVector> asDoubleVectorClosure() {
            return new CoercionStep<>(RType.Double, true, false, false, false, true, true);
        }

        public static <T> PipelineStep<T, RDoubleVector> asDoubleVector(boolean preserveNames, boolean preserveDimensions, boolean preserveAttributes) {
            return new CoercionStep<>(RType.Double, true, preserveNames, preserveDimensions, preserveAttributes);
        }

        public static <T> PipelineStep<T, RDoubleVector> asDoubleVectorClosure(boolean preserveNames, boolean preserveDimensions, boolean preserveAttributes) {
            return new CoercionStep<>(RType.Double, true, preserveNames, preserveDimensions, preserveAttributes,
true, true); } public static <T> PipelineStep<T, String> asString() { return new CoercionStep<>(RType.Character, false); } public static <T> PipelineStep<T, RStringVector> asStringVector() { return new CoercionStep<>(RType.Character, true); } public static <T> PipelineStep<T, RStringVector> asStringVector(boolean preserveNames, boolean preserveDimensions, boolean preserveAttributes) { return new CoercionStep<>(RType.Character, true, preserveNames, preserveDimensions, preserveAttributes); } public static <T> PipelineStep<T, RComplexVector> asComplexVector() { return new CoercionStep<>(RType.Complex, true); } public static <T> PipelineStep<T, RRawVector> asRawVector() { return new CoercionStep<>(RType.Raw, true); } public static <T> PipelineStep<T, Byte> asLogical() { return new CoercionStep<>(RType.Logical, false); } public static <T> PipelineStep<T, RLogicalVector> asLogicalVector() { return new CoercionStep<>(RType.Logical, true); } public static <T> PipelineStep<T, RLogicalVector> asLogicalVector(boolean preserveNames, boolean preserveDimensions, boolean preserveAttributes) { return new CoercionStep<>(RType.Logical, true, preserveNames, preserveDimensions, preserveAttributes, false, false); } public static PipelineStep<Byte, Boolean> asBoolean() { return map(toBoolean()); } public static <T> PipelineStep<T, RAbstractVector> asVector() { return new CoercionStep<>(RType.Any, /* vectorCoercion: */true); } public static <T> PipelineStep<T, RAbstractVector> asVector(boolean preserveNonVector) { return new CoercionStep<>(RType.Any, true, false, false, false, preserveNonVector, false); } /** * Version of {@code findFirst} step that can be used in {@code chain}, must be followed by * call for {@code xyzElement()}. */ public static <V extends RAbstractVector> FindFirstNodeBuilder findFirst(RError.Message message, Object... 
messageArgs) { return new FindFirstNodeBuilder(new MessageData(message, messageArgs)); } /** * Version of {@code findFirst} step that can be used in {@code chain}, must be followed by * call for {@code xyzElement()}. */ public static <V extends RAbstractVector> FindFirstNodeBuilder findFirst() { return new FindFirstNodeBuilder(null); } public static <T> PipelineStep<T, T> mustNotBeNA(RError.Message message, Object... messageArgs) { return new NotNAStep<>(null, new MessageData(message, messageArgs)); } public static <T> PipelineStep<T, T> shouldNotBeNA(T naReplacement, RError.Message message, Object... messageArgs) { return new NotNAStep<>(naReplacement, new MessageData(message, messageArgs)); } public static <T> PipelineStep<T, T> replaceNA(T naReplacement) { return new NotNAStep<>(naReplacement, null); } public static <T> PipelineStep<T, T> mustNotBeNA() { return new NotNAStep<>(null, null); } public static <T> PipelineStep<T, T> boxPrimitive() { return new PipelineStep.BoxPrimitiveStep<>(); } public static NullFilter nullValue() { return NullFilter.INSTANCE; } public static MissingFilter missingValue() { return MissingFilter.INSTANCE; } public static <T> CompareFilter<T> equalTo(T x) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ScalarValue(x, RType.Any)); } public static <T extends RAbstractVector> CompareFilter<T> notEmpty() { return new CompareFilter<>(CompareFilter.GT, new CompareFilter.VectorSize(0)); } public static <T extends RAbstractVector> CompareFilter<T> singleElement() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.VectorSize(1)); } public static <T extends RAbstractVector> CompareFilter<T> size(int s) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.VectorSize(s)); } public static CompareFilter<RStringVector> elementAt(int index, String value) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ElementAt(index, value, RType.Character)); } public static CompareFilter<RIntVector> 
elementAt(int index, int value) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ElementAt(index, value, RType.Integer)); } public static CompareFilter<RDoubleVector> elementAt(int index, double value) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ElementAt(index, value, RType.Double)); } public static CompareFilter<RComplexVector> elementAt(int index, RComplex value) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ElementAt(index, value, RType.Complex)); } public static CompareFilter<RLogicalVector> elementAt(int index, byte value) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ElementAt(index, value, RType.Logical)); } public static <T extends RAbstractVector> MatrixFilter<T> matrix() { return MatrixFilter.isMatrixFilter(); } public static <T extends RAbstractVector> MatrixFilter<T> squareMatrix() { return MatrixFilter.isSquareMatrixFilter(); } public static <T extends RAbstractVector> CompareFilter<T> dimEq(int dim, int x) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.Dim(dim, x)); } public static <T extends RAbstractVector> CompareFilter<T> dimGt(int dim, int x) { return new CompareFilter<>(CompareFilter.GT, new CompareFilter.Dim(dim, x)); } public static CompareFilter<Byte> logicalTrue() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ScalarValue(RRuntime.LOGICAL_TRUE, RType.Logical)); } public static CompareFilter<Byte> logicalFalse() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ScalarValue(RRuntime.LOGICAL_FALSE, RType.Logical)); } public static CompareFilter<Integer> intNA() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.NATest(RType.Integer)); } public static NotFilter<Integer> notIntNA() { return new NotFilter<>(intNA()); } public static CompareFilter<Byte> logicalNA() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.NATest(RType.Logical)); } public static NotFilter<Byte> notLogicalNA() { return 
new NotFilter<>(logicalNA()); } public static CompareFilter<Double> doubleNA() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.NATest(RType.Double)); } public static NotFilter<Double> notDoubleNA() { return new NotFilter<>(doubleNA()); } public static CompareFilter<String> stringNA() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.NATest(RType.Character)); } public static NotFilter<String> notStringNA() { return new NotFilter<>(stringNA()); } public static CompareFilter<RComplex> complexNA() { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.NATest(RType.Complex)); } public static NotFilter<RComplex> notComplexNA() { return new NotFilter<>(complexNA()); } public static DoubleFilter isFractional() { return DoubleFilter.IS_FRACTIONAL; } public static DoubleFilter isFinite() { return DoubleFilter.IS_FINITE; } public static CompareFilter<Integer> eq(int x) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ScalarValue(x, RType.Integer)); } public static CompareFilter<Double> eq(double x) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ScalarValue(x, RType.Double)); } public static CompareFilter<Byte> eq(byte x) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.ScalarValue(x, RType.Logical)); } public static CompareFilter<String> eq(String x) { return new CompareFilter<>(CompareFilter.STRING_EQ, new CompareFilter.ScalarValue(x, RType.Character)); } public static NotFilter<Integer> neq(int x) { return new NotFilter<>(eq(x)); } public static NotFilter<Double> neq(double x) { return new NotFilter<>(eq(x)); } public static CompareFilter<Integer> gt(int x) { return new CompareFilter<>(CompareFilter.GT, new CompareFilter.ScalarValue(x, RType.Integer)); } public static CompareFilter<Double> gt(double x) { return new CompareFilter<>(CompareFilter.GT, new CompareFilter.ScalarValue(x, RType.Double)); } public static CompareFilter<Integer> gte(int x) { return new 
CompareFilter<>(CompareFilter.GE, new CompareFilter.ScalarValue(x, RType.Integer)); } public static CompareFilter<Double> gte(double x) { return new CompareFilter<>(CompareFilter.GE, new CompareFilter.ScalarValue(x, RType.Double)); } public static CompareFilter<Integer> lt(int x) { return new CompareFilter<>(CompareFilter.LT, new CompareFilter.ScalarValue(x, RType.Integer)); } public static CompareFilter<Double> lt(double x) { return new CompareFilter<>(CompareFilter.LT, new CompareFilter.ScalarValue(x, RType.Double)); } public static CompareFilter<Integer> lte(int x) { return new CompareFilter<>(CompareFilter.LE, new CompareFilter.ScalarValue(x, RType.Integer)); } public static CompareFilter<Double> lte(double x) { return new CompareFilter<>(CompareFilter.LE, new CompareFilter.ScalarValue(x, RType.Double)); } public static CompareFilter<String> length(int l) { return new CompareFilter<>(CompareFilter.EQ, new CompareFilter.StringLength(l)); } public static CompareFilter<String> isEmpty() { return length(0); } public static CompareFilter<String> lengthGt(int l) { return new CompareFilter<>(CompareFilter.GT, new CompareFilter.StringLength(l)); } public static CompareFilter<String> lengthGte(int l) { return new CompareFilter<>(CompareFilter.GE, new CompareFilter.StringLength(l)); } public static CompareFilter<String> lengthLt(int l) { return new CompareFilter<>(CompareFilter.LT, new CompareFilter.StringLength(l)); } public static CompareFilter<String> lengthLte(int l) { return new CompareFilter<>(CompareFilter.LE, new CompareFilter.StringLength(l)); } public static CompareFilter<Integer> gt0() { return gt(0); } public static CompareFilter<Integer> gte0() { return gte(0); } public static CompareFilter<Integer> gt1() { return gt(1); } public static CompareFilter<Integer> gte1() { return gte(1); } public static <R> TypeFilter<Object, R> instanceOf(Class<R> cls) { return new TypeFilter<>(cls); } public static TypeFilter<Object, RFunction> builtin() { return new 
TypeFilter<>(RFunction.class, x -> x.isBuiltin()); } public static TypeFilter<Object, TruffleObject> foreign() { return new TypeFilter<>(TruffleObject.class, x -> RRuntime.isForeignObject(x)); } public static Filter<Object, RIntVector> integerValue() { return new RTypeFilter<>(RType.Integer); } public static Filter<Object, RStringVector> stringValue() { return new RTypeFilter<>(RType.Character); } public static Filter<Object, RDoubleVector> doubleValue() { return new RTypeFilter<>(RType.Double); } public static Filter<Object, RLogicalVector> logicalValue() { return new RTypeFilter<>(RType.Logical); } @SuppressWarnings({"rawtypes", "unchecked"}) public static <R extends Object> Filter<Object, R> complexValue() { return (Filter) complexVector().or(instanceOf(RComplex.class)); } public static Filter<Object, RComplexVector> complexVector() { return new RTypeFilter<>(RType.Complex); } @SuppressWarnings({"rawtypes", "unchecked"}) public static <R extends Object> Filter<Object, R> rawValue() { return (Filter) rawVector().or(instanceOf(RRaw.class)); } public static Filter<Object, RRawVector> rawVector() { return new RTypeFilter<>(RType.Raw); } public static TypeFilter<Object, Object> anyValue() { return new TypeFilter<>(Object.class); } /** * Valid {@link RArgsValuesAndNames} do not contain * {@link com.oracle.truffle.r.runtime.data.REmpty} values. */ @SuppressWarnings({"rawtypes", "unchecked"}) public static Filter<Object, RArgsValuesAndNames> validVarArgs() { return (Filter) integerValue().or(doubleValue()).or(logicalValue()); } @SuppressWarnings({"rawtypes", "unchecked"}) public static Filter<Object, RAbstractVector> numericValue() { return (Filter) integerValue().or(doubleValue()).or(logicalValue()); } /** * Checks that the argument is a list or vector/scalar of type numeric, string, complex or * raw. 
*/ @SuppressWarnings({"rawtypes", "unchecked"}) public static Filter<Object, RAbstractVector> abstractVectorValue() { return (Filter) numericValue().or(stringValue()).or(complexValue()).or(rawValue()).or(instanceOf(RAbstractListVector.class)); } public static Filter<Object, Integer> atomicIntegerValue() { return new TypeFilter<>(Integer.class); } public static Filter<Object, Byte> atomicLogicalValue() { return new TypeFilter<>(Byte.class); } public static MapByteToBoolean toBoolean() { return new MapByteToBoolean(true); } public static MapByteToBoolean toBoolean(boolean naReplacement) { return new MapByteToBoolean(naReplacement); } public static MapDoubleToInt doubleToInt() { return MapDoubleToInt.INSTANCE; } public static MapToCharAt charAt0(int defaultValue) { return new MapToCharAt(0, defaultValue); } public static <T> MapToValue<T, RNull> nullConstant() { return new MapToValue<>(RNull.instance); } public static <T> MapToValue<T, RMissing> missingConstant() { return new MapToValue<>(RMissing.instance); } public static <T> MapToValue<T, String> constant(String s) { return new MapToValue<>(s); } public static <T> MapToValue<T, Integer> constant(int i) { return new MapToValue<>(i); } public static <T> MapToValue<T, Double> constant(double d) { return new MapToValue<>(d); } public static <T> MapToValue<T, Byte> constant(byte l) { return new MapToValue<>(l); } public static <T> MapToValue<T, RIntVector> emptyIntegerVector() { return new MapToValue<>(RDataFactory.createEmptyIntVector()); } public static <T> MapToValue<T, RDoubleVector> emptyDoubleVector() { return new MapToValue<>(RDataFactory.createEmptyDoubleVector()); } public static <T> MapToValue<T, RLogicalVector> emptyLogicalVector() { return new MapToValue<>(RDataFactory.createEmptyLogicalVector()); } public static <T> MapToValue<T, RComplexVector> emptyComplexVector() { return new MapToValue<>(RDataFactory.createEmptyComplexVector()); } public static <T> MapToValue<T, RStringVector> emptyStringVector() { 
return new MapToValue<>(RDataFactory.createEmptyStringVector()); } public static <T> MapToValue<T, RList> emptyList() { return new MapToValue<>(RDataFactory.createList()); } /** * The function returned by this method is typically used as an error message argument. * * @return a function returning the type name of its argument */ public static Function<Object, String> typeName() { CompilerAsserts.neverPartOfCompilation(); return arg -> RRuntime.getRTypeName(arg); } @TruffleBoundary public static String getTypeName(Object arg) { return RRuntime.getRTypeName(arg); } } }
googleapis/google-cloud-java
34,997
java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/CloudSqlBigQueryConnectionSpec.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1/bigquery.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1; /** * * * <pre> * Specification for the BigQuery connection to a Cloud SQL instance. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec} */ public final class CloudSqlBigQueryConnectionSpec extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec) CloudSqlBigQueryConnectionSpecOrBuilder { private static final long serialVersionUID = 0L; // Use CloudSqlBigQueryConnectionSpec.newBuilder() to construct. 
private CloudSqlBigQueryConnectionSpec( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CloudSqlBigQueryConnectionSpec() { instanceId_ = ""; database_ = ""; type_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CloudSqlBigQueryConnectionSpec(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.BigQueryProto .internal_static_google_cloud_datacatalog_v1_CloudSqlBigQueryConnectionSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.BigQueryProto .internal_static_google_cloud_datacatalog_v1_CloudSqlBigQueryConnectionSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.class, com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.Builder.class); } /** * * * <pre> * Supported Cloud SQL database types. * </pre> * * Protobuf enum {@code google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType} */ public enum DatabaseType implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Unspecified database type. * </pre> * * <code>DATABASE_TYPE_UNSPECIFIED = 0;</code> */ DATABASE_TYPE_UNSPECIFIED(0), /** * * * <pre> * Cloud SQL for PostgreSQL. * </pre> * * <code>POSTGRES = 1;</code> */ POSTGRES(1), /** * * * <pre> * Cloud SQL for MySQL. * </pre> * * <code>MYSQL = 2;</code> */ MYSQL(2), UNRECOGNIZED(-1), ; /** * * * <pre> * Unspecified database type. * </pre> * * <code>DATABASE_TYPE_UNSPECIFIED = 0;</code> */ public static final int DATABASE_TYPE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Cloud SQL for PostgreSQL. * </pre> * * <code>POSTGRES = 1;</code> */ public static final int POSTGRES_VALUE = 1; /** * * * <pre> * Cloud SQL for MySQL. 
* </pre> * * <code>MYSQL = 2;</code> */ public static final int MYSQL_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static DatabaseType valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static DatabaseType forNumber(int value) { switch (value) { case 0: return DATABASE_TYPE_UNSPECIFIED; case 1: return POSTGRES; case 2: return MYSQL; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<DatabaseType> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<DatabaseType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<DatabaseType>() { public DatabaseType findValueByNumber(int number) { return DatabaseType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.getDescriptor() .getEnumTypes() .get(0); } private static final DatabaseType[] VALUES = values(); public static DatabaseType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != 
getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private DatabaseType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType) } public static final int INSTANCE_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object instanceId_ = ""; /** * * * <pre> * Cloud SQL instance ID in the format of `project:location:instance`. * </pre> * * <code>string instance_id = 1;</code> * * @return The instanceId. */ @java.lang.Override public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } } /** * * * <pre> * Cloud SQL instance ID in the format of `project:location:instance`. * </pre> * * <code>string instance_id = 1;</code> * * @return The bytes for instanceId. */ @java.lang.Override public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATABASE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object database_ = ""; /** * * * <pre> * Database name. * </pre> * * <code>string database = 2;</code> * * @return The database. 
*/ @java.lang.Override public java.lang.String getDatabase() { java.lang.Object ref = database_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); database_ = s; return s; } } /** * * * <pre> * Database name. * </pre> * * <code>string database = 2;</code> * * @return The bytes for database. */ @java.lang.Override public com.google.protobuf.ByteString getDatabaseBytes() { java.lang.Object ref = database_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); database_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TYPE_FIELD_NUMBER = 3; private int type_ = 0; /** * * * <pre> * Type of the Cloud SQL database. * </pre> * * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType type = 3;</code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * Type of the Cloud SQL database. * </pre> * * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType type = 3;</code> * * @return The type. */ @java.lang.Override public com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType getType() { com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType result = com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType.forNumber( type_); return result == null ? 
com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(database_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, database_); } if (type_ != com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType .DATABASE_TYPE_UNSPECIFIED .getNumber()) { output.writeEnum(3, type_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(database_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, database_); } if (type_ != com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType .DATABASE_TYPE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, type_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec)) { return super.equals(obj); } com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec other = 
(com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec) obj; if (!getInstanceId().equals(other.getInstanceId())) return false; if (!getDatabase().equals(other.getDatabase())) return false; if (type_ != other.type_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); hash = (37 * hash) + DATABASE_FIELD_NUMBER; hash = (53 * hash) + getDatabase().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Specification for the BigQuery connection to a Cloud SQL instance. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec) com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpecOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.BigQueryProto .internal_static_google_cloud_datacatalog_v1_CloudSqlBigQueryConnectionSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.BigQueryProto .internal_static_google_cloud_datacatalog_v1_CloudSqlBigQueryConnectionSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.class, com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.Builder.class); } // Construct using com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.newBuilder() private Builder() {} 
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; instanceId_ = ""; database_ = ""; type_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1.BigQueryProto .internal_static_google_cloud_datacatalog_v1_CloudSqlBigQueryConnectionSpec_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec build() { com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec buildPartial() { com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec result = new com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.instanceId_ = instanceId_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.database_ = database_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.type_ = type_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 
return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec) { return mergeFrom((com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec other) { if (other == com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.getDefaultInstance()) return this; if (!other.getInstanceId().isEmpty()) { instanceId_ = other.instanceId_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getDatabase().isEmpty()) { database_ = other.database_; bitField0_ |= 0x00000002; onChanged(); } if (other.type_ != 0) { setTypeValue(other.getTypeValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { instanceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { 
database_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { type_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object instanceId_ = ""; /** * * * <pre> * Cloud SQL instance ID in the format of `project:location:instance`. * </pre> * * <code>string instance_id = 1;</code> * * @return The instanceId. */ public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Cloud SQL instance ID in the format of `project:location:instance`. * </pre> * * <code>string instance_id = 1;</code> * * @return The bytes for instanceId. */ public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Cloud SQL instance ID in the format of `project:location:instance`. * </pre> * * <code>string instance_id = 1;</code> * * @param value The instanceId to set. * @return This builder for chaining. 
*/ public Builder setInstanceId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Cloud SQL instance ID in the format of `project:location:instance`. * </pre> * * <code>string instance_id = 1;</code> * * @return This builder for chaining. */ public Builder clearInstanceId() { instanceId_ = getDefaultInstance().getInstanceId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Cloud SQL instance ID in the format of `project:location:instance`. * </pre> * * <code>string instance_id = 1;</code> * * @param value The bytes for instanceId to set. * @return This builder for chaining. */ public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object database_ = ""; /** * * * <pre> * Database name. * </pre> * * <code>string database = 2;</code> * * @return The database. */ public java.lang.String getDatabase() { java.lang.Object ref = database_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); database_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Database name. * </pre> * * <code>string database = 2;</code> * * @return The bytes for database. */ public com.google.protobuf.ByteString getDatabaseBytes() { java.lang.Object ref = database_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); database_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Database name. * </pre> * * <code>string database = 2;</code> * * @param value The database to set. 
* @return This builder for chaining. */ public Builder setDatabase(java.lang.String value) { if (value == null) { throw new NullPointerException(); } database_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Database name. * </pre> * * <code>string database = 2;</code> * * @return This builder for chaining. */ public Builder clearDatabase() { database_ = getDefaultInstance().getDatabase(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Database name. * </pre> * * <code>string database = 2;</code> * * @param value The bytes for database to set. * @return This builder for chaining. */ public Builder setDatabaseBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); database_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int type_ = 0; /** * * * <pre> * Type of the Cloud SQL database. * </pre> * * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType type = 3; * </code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * Type of the Cloud SQL database. * </pre> * * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType type = 3; * </code> * * @param value The enum numeric value on the wire for type to set. * @return This builder for chaining. */ public Builder setTypeValue(int value) { type_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Type of the Cloud SQL database. * </pre> * * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType type = 3; * </code> * * @return The type. 
*/ @java.lang.Override public com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType getType() { com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType result = com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType.forNumber( type_); return result == null ? com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType.UNRECOGNIZED : result; } /** * * * <pre> * Type of the Cloud SQL database. * </pre> * * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType type = 3; * </code> * * @param value The type to set. * @return This builder for chaining. */ public Builder setType( com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; type_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Type of the Cloud SQL database. * </pre> * * <code>.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType type = 3; * </code> * * @return This builder for chaining. 
*/ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000004); type_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec) private static final com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec(); } public static com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CloudSqlBigQueryConnectionSpec> PARSER = new com.google.protobuf.AbstractParser<CloudSqlBigQueryConnectionSpec>() { @java.lang.Override public CloudSqlBigQueryConnectionSpec parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CloudSqlBigQueryConnectionSpec> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CloudSqlBigQueryConnectionSpec> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
34,940
java-gsuite-addons/proto-google-apps-script-type-protos/src/main/java/com/google/apps/script/type/UniversalActionExtensionPoint.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/apps/script/type/extension_point.proto // Protobuf Java Version: 3.25.8 package com.google.apps.script.type; /** * * * <pre> * Format for declaring a universal action menu item extension point. * </pre> * * Protobuf type {@code google.apps.script.type.UniversalActionExtensionPoint} */ public final class UniversalActionExtensionPoint extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.apps.script.type.UniversalActionExtensionPoint) UniversalActionExtensionPointOrBuilder { private static final long serialVersionUID = 0L; // Use UniversalActionExtensionPoint.newBuilder() to construct. 
private UniversalActionExtensionPoint(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UniversalActionExtensionPoint() { label_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UniversalActionExtensionPoint(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.script.type.ExtensionPoint .internal_static_google_apps_script_type_UniversalActionExtensionPoint_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.script.type.ExtensionPoint .internal_static_google_apps_script_type_UniversalActionExtensionPoint_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.script.type.UniversalActionExtensionPoint.class, com.google.apps.script.type.UniversalActionExtensionPoint.Builder.class); } private int actionTypeCase_ = 0; @SuppressWarnings("serial") private java.lang.Object actionType_; public enum ActionTypeCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { OPEN_LINK(2), RUN_FUNCTION(3), ACTIONTYPE_NOT_SET(0); private final int value; private ActionTypeCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static ActionTypeCase valueOf(int value) { return forNumber(value); } public static ActionTypeCase forNumber(int value) { switch (value) { case 2: return OPEN_LINK; case 3: return RUN_FUNCTION; case 0: return ACTIONTYPE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public ActionTypeCase getActionTypeCase() { return ActionTypeCase.forNumber(actionTypeCase_); } public static final int LABEL_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object label_ = ""; /** * * * <pre> * Required. User-visible text describing the action taken by activating this * extension point, for example, "Add a new contact". * </pre> * * <code>string label = 1;</code> * * @return The label. */ @java.lang.Override public java.lang.String getLabel() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } } /** * * * <pre> * Required. User-visible text describing the action taken by activating this * extension point, for example, "Add a new contact". * </pre> * * <code>string label = 1;</code> * * @return The bytes for label. */ @java.lang.Override public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int OPEN_LINK_FIELD_NUMBER = 2; /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @return Whether the openLink field is set. */ public boolean hasOpenLink() { return actionTypeCase_ == 2; } /** * * * <pre> * URL to be opened by the UniversalAction. 
* </pre> * * <code>string open_link = 2;</code> * * @return The openLink. */ public java.lang.String getOpenLink() { java.lang.Object ref = ""; if (actionTypeCase_ == 2) { ref = actionType_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (actionTypeCase_ == 2) { actionType_ = s; } return s; } } /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @return The bytes for openLink. */ public com.google.protobuf.ByteString getOpenLinkBytes() { java.lang.Object ref = ""; if (actionTypeCase_ == 2) { ref = actionType_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (actionTypeCase_ == 2) { actionType_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RUN_FUNCTION_FIELD_NUMBER = 3; /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @return Whether the runFunction field is set. */ public boolean hasRunFunction() { return actionTypeCase_ == 3; } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @return The runFunction. */ public java.lang.String getRunFunction() { java.lang.Object ref = ""; if (actionTypeCase_ == 3) { ref = actionType_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (actionTypeCase_ == 3) { actionType_ = s; } return s; } } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @return The bytes for runFunction. 
*/ public com.google.protobuf.ByteString getRunFunctionBytes() { java.lang.Object ref = ""; if (actionTypeCase_ == 3) { ref = actionType_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (actionTypeCase_ == 3) { actionType_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, label_); } if (actionTypeCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, actionType_); } if (actionTypeCase_ == 3) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, actionType_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, label_); } if (actionTypeCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, actionType_); } if (actionTypeCase_ == 3) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, actionType_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.apps.script.type.UniversalActionExtensionPoint)) { return super.equals(obj); } 
com.google.apps.script.type.UniversalActionExtensionPoint other = (com.google.apps.script.type.UniversalActionExtensionPoint) obj; if (!getLabel().equals(other.getLabel())) return false; if (!getActionTypeCase().equals(other.getActionTypeCase())) return false; switch (actionTypeCase_) { case 2: if (!getOpenLink().equals(other.getOpenLink())) return false; break; case 3: if (!getRunFunction().equals(other.getRunFunction())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + LABEL_FIELD_NUMBER; hash = (53 * hash) + getLabel().hashCode(); switch (actionTypeCase_) { case 2: hash = (37 * hash) + OPEN_LINK_FIELD_NUMBER; hash = (53 * hash) + getOpenLink().hashCode(); break; case 3: hash = (37 * hash) + RUN_FUNCTION_FIELD_NUMBER; hash = (53 * hash) + getRunFunction().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.apps.script.type.UniversalActionExtensionPoint parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.apps.script.type.UniversalActionExtensionPoint prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Format for declaring a universal action menu item extension point. 
* </pre> * * Protobuf type {@code google.apps.script.type.UniversalActionExtensionPoint} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.apps.script.type.UniversalActionExtensionPoint) com.google.apps.script.type.UniversalActionExtensionPointOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.script.type.ExtensionPoint .internal_static_google_apps_script_type_UniversalActionExtensionPoint_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.script.type.ExtensionPoint .internal_static_google_apps_script_type_UniversalActionExtensionPoint_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.script.type.UniversalActionExtensionPoint.class, com.google.apps.script.type.UniversalActionExtensionPoint.Builder.class); } // Construct using com.google.apps.script.type.UniversalActionExtensionPoint.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; label_ = ""; actionTypeCase_ = 0; actionType_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.apps.script.type.ExtensionPoint .internal_static_google_apps_script_type_UniversalActionExtensionPoint_descriptor; } @java.lang.Override public com.google.apps.script.type.UniversalActionExtensionPoint getDefaultInstanceForType() { return com.google.apps.script.type.UniversalActionExtensionPoint.getDefaultInstance(); } @java.lang.Override public com.google.apps.script.type.UniversalActionExtensionPoint build() { com.google.apps.script.type.UniversalActionExtensionPoint result = buildPartial(); if 
(!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.apps.script.type.UniversalActionExtensionPoint buildPartial() { com.google.apps.script.type.UniversalActionExtensionPoint result = new com.google.apps.script.type.UniversalActionExtensionPoint(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.apps.script.type.UniversalActionExtensionPoint result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.label_ = label_; } } private void buildPartialOneofs( com.google.apps.script.type.UniversalActionExtensionPoint result) { result.actionTypeCase_ = actionTypeCase_; result.actionType_ = this.actionType_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.apps.script.type.UniversalActionExtensionPoint) { return mergeFrom((com.google.apps.script.type.UniversalActionExtensionPoint) other); } else { super.mergeFrom(other); return 
this; } } public Builder mergeFrom(com.google.apps.script.type.UniversalActionExtensionPoint other) { if (other == com.google.apps.script.type.UniversalActionExtensionPoint.getDefaultInstance()) return this; if (!other.getLabel().isEmpty()) { label_ = other.label_; bitField0_ |= 0x00000001; onChanged(); } switch (other.getActionTypeCase()) { case OPEN_LINK: { actionTypeCase_ = 2; actionType_ = other.actionType_; onChanged(); break; } case RUN_FUNCTION: { actionTypeCase_ = 3; actionType_ = other.actionType_; onChanged(); break; } case ACTIONTYPE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { label_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); actionTypeCase_ = 2; actionType_ = s; break; } // case 18 case 26: { java.lang.String s = input.readStringRequireUtf8(); actionTypeCase_ = 3; actionType_ = s; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int actionTypeCase_ = 0; private java.lang.Object actionType_; public ActionTypeCase getActionTypeCase() { return ActionTypeCase.forNumber(actionTypeCase_); } public Builder clearActionType() { actionTypeCase_ = 0; actionType_ = null; 
onChanged(); return this; } private int bitField0_; private java.lang.Object label_ = ""; /** * * * <pre> * Required. User-visible text describing the action taken by activating this * extension point, for example, "Add a new contact". * </pre> * * <code>string label = 1;</code> * * @return The label. */ public java.lang.String getLabel() { java.lang.Object ref = label_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. User-visible text describing the action taken by activating this * extension point, for example, "Add a new contact". * </pre> * * <code>string label = 1;</code> * * @return The bytes for label. */ public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. User-visible text describing the action taken by activating this * extension point, for example, "Add a new contact". * </pre> * * <code>string label = 1;</code> * * @param value The label to set. * @return This builder for chaining. */ public Builder setLabel(java.lang.String value) { if (value == null) { throw new NullPointerException(); } label_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. User-visible text describing the action taken by activating this * extension point, for example, "Add a new contact". * </pre> * * <code>string label = 1;</code> * * @return This builder for chaining. */ public Builder clearLabel() { label_ = getDefaultInstance().getLabel(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
User-visible text describing the action taken by activating this * extension point, for example, "Add a new contact". * </pre> * * <code>string label = 1;</code> * * @param value The bytes for label to set. * @return This builder for chaining. */ public Builder setLabelBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); label_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @return Whether the openLink field is set. */ @java.lang.Override public boolean hasOpenLink() { return actionTypeCase_ == 2; } /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @return The openLink. */ @java.lang.Override public java.lang.String getOpenLink() { java.lang.Object ref = ""; if (actionTypeCase_ == 2) { ref = actionType_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (actionTypeCase_ == 2) { actionType_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @return The bytes for openLink. */ @java.lang.Override public com.google.protobuf.ByteString getOpenLinkBytes() { java.lang.Object ref = ""; if (actionTypeCase_ == 2) { ref = actionType_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (actionTypeCase_ == 2) { actionType_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @param value The openLink to set. * @return This builder for chaining. 
*/ public Builder setOpenLink(java.lang.String value) { if (value == null) { throw new NullPointerException(); } actionTypeCase_ = 2; actionType_ = value; onChanged(); return this; } /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @return This builder for chaining. */ public Builder clearOpenLink() { if (actionTypeCase_ == 2) { actionTypeCase_ = 0; actionType_ = null; onChanged(); } return this; } /** * * * <pre> * URL to be opened by the UniversalAction. * </pre> * * <code>string open_link = 2;</code> * * @param value The bytes for openLink to set. * @return This builder for chaining. */ public Builder setOpenLinkBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); actionTypeCase_ = 2; actionType_ = value; onChanged(); return this; } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @return Whether the runFunction field is set. */ @java.lang.Override public boolean hasRunFunction() { return actionTypeCase_ == 3; } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @return The runFunction. */ @java.lang.Override public java.lang.String getRunFunction() { java.lang.Object ref = ""; if (actionTypeCase_ == 3) { ref = actionType_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (actionTypeCase_ == 3) { actionType_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @return The bytes for runFunction. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRunFunctionBytes() { java.lang.Object ref = ""; if (actionTypeCase_ == 3) { ref = actionType_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (actionTypeCase_ == 3) { actionType_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @param value The runFunction to set. * @return This builder for chaining. */ public Builder setRunFunction(java.lang.String value) { if (value == null) { throw new NullPointerException(); } actionTypeCase_ = 3; actionType_ = value; onChanged(); return this; } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @return This builder for chaining. */ public Builder clearRunFunction() { if (actionTypeCase_ == 3) { actionTypeCase_ = 0; actionType_ = null; onChanged(); } return this; } /** * * * <pre> * Endpoint to be run by the UniversalAction. * </pre> * * <code>string run_function = 3;</code> * * @param value The bytes for runFunction to set. * @return This builder for chaining. 
*/ public Builder setRunFunctionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); actionTypeCase_ = 3; actionType_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.apps.script.type.UniversalActionExtensionPoint) } // @@protoc_insertion_point(class_scope:google.apps.script.type.UniversalActionExtensionPoint) private static final com.google.apps.script.type.UniversalActionExtensionPoint DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.apps.script.type.UniversalActionExtensionPoint(); } public static com.google.apps.script.type.UniversalActionExtensionPoint getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UniversalActionExtensionPoint> PARSER = new com.google.protobuf.AbstractParser<UniversalActionExtensionPoint>() { @java.lang.Override public UniversalActionExtensionPoint parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<UniversalActionExtensionPoint> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UniversalActionExtensionPoint> getParserForType() { return PARSER; } @java.lang.Override public com.google.apps.script.type.UniversalActionExtensionPoint getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ======================================================================
// NOTE(review): file-concatenation residue. The three values preserved
// below are dataset metadata (repository id, byte size, file path) that
// mark the start of a second, unrelated source file appended to this one:
//   repo: apache/jackrabbit-oak
//   size: 35,076 bytes
//   path: oak-auth-external/src/test/java/org/apache/jackrabbit/oak/spi/security/authentication/external/impl/jmx/SyncMBeanImplTest.java
// These lines are not valid Java; the two files must be split apart (and
// this marker removed) before either compiles. Also note the preceding
// generated protobuf code is marked DO NOT EDIT — fix it by regenerating
// from the .proto source, never by hand.
// ======================================================================
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security.authentication.external.impl.jmx; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import javax.jcr.RepositoryException; import javax.jcr.ValueFactory; import org.apache.jackrabbit.api.security.user.Authorizable; import org.apache.jackrabbit.api.security.user.Group; import org.apache.jackrabbit.api.security.user.User; import org.apache.jackrabbit.api.security.user.UserManager; import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalGroup; import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentity; import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityException; import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProvider; import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProviderManager; import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityRef; import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalUser; import 
org.apache.jackrabbit.oak.spi.security.authentication.external.SyncContext; import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncException; import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncManager; import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncResult; import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncedIdentity; import org.apache.jackrabbit.oak.spi.security.authentication.external.TestIdentityProvider; import org.apache.jackrabbit.oak.spi.security.authentication.external.basic.DefaultSyncContext; import org.apache.jackrabbit.oak.spi.security.authentication.external.basic.DefaultSyncResultImpl; import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.DefaultSyncHandler; import org.apache.jackrabbit.oak.spi.security.user.UserConstants; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.Before; import org.junit.Test; import static org.apache.jackrabbit.oak.spi.security.authentication.external.TestIdentityProvider.ID_TEST_USER; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class SyncMBeanImplTest extends AbstractJmxTest { private static final String SYNC_NAME = "testSyncName"; private SyncMBeanImpl syncMBean; private SyncManager syncMgr; private ExternalIdentityProviderManager idpMgr; @Before public void before() throws Exception { super.before(); syncMgr = name -> { if (SYNC_NAME.equals(name)) { return new DefaultSyncHandler(syncConfig); } else if (ThrowingSyncHandler.NAME.equals(name)) { return new ThrowingSyncHandler(false); } else if (ThrowingSyncHandler.NAME_ALLOWS_IDENTITY_LISTING.equals(name)) { return new ThrowingSyncHandler(true); } else { return null; } }; idpMgr = name -> { 
if (name.equals(idp.getName())) { return idp; } else { return null; } }; syncMBean = createSyncMBeanImpl(SYNC_NAME, idp.getName()); } private Map<String, String> getExpectedUserResult(String expectedOp, boolean includeGroups) throws ExternalIdentityException { Map<String, String> expected = new HashMap<>(); Iterator<ExternalUser> it = idp.listUsers(); while (it.hasNext()) { ExternalUser eu = it.next(); expected.put(eu.getId(), expectedOp); if (includeGroups) { for (ExternalIdentityRef ref : eu.getDeclaredGroups()) { expected.put(ref.getId(), expectedOp); } } } return expected; } private SyncMBeanImpl createSyncMBeanImpl(@NotNull String syncHandlerName, @NotNull String idpName) { return new SyncMBeanImpl(getContentRepository(), getSecurityProvider(), syncMgr, syncHandlerName, idpMgr, idpName); } private SyncMBeanImpl createThrowingSyncMBean(boolean allowListIdentities) { String name = (allowListIdentities) ? ThrowingSyncHandler.NAME_ALLOWS_IDENTITY_LISTING : ThrowingSyncHandler.NAME; return new SyncMBeanImpl(getContentRepository(), getSecurityProvider(), syncMgr, name, idpMgr, idp.getName()); } @Test public void testGetSyncHandlerName() { assertEquals(SYNC_NAME, syncMBean.getSyncHandlerName()); } @Test public void testInvalidSyncHandlerName() { SyncMBeanImpl syncMBean = createSyncMBeanImpl("invalid", idp.getName()); assertEquals("invalid", syncMBean.getSyncHandlerName()); // calling any sync-operation must fail due to the invalid configuration try { syncMBean.syncAllExternalUsers(); fail("syncAllExternalUsers with invalid SyncHandlerName must fail"); } catch (IllegalArgumentException e) { //success } } @Test public void testGetIDPName() { assertEquals(idp.getName(), syncMBean.getIDPName()); } @Test public void testInvalidIDPName() { SyncMBeanImpl syncMBean = createSyncMBeanImpl(SYNC_NAME, "invalid"); assertEquals("invalid", syncMBean.getIDPName()); // calling any sync-operation must fail due to the invalid configuration try { syncMBean.syncAllExternalUsers(); 
fail("syncAllExternalUsers with invalid IDP name must fail"); } catch (IllegalArgumentException e) { //success } } /** * test users have never been synced before => result must be NSA */ @Test public void testSyncUsersBefore() { String[] userIds = new String[] {ID_TEST_USER, TestIdentityProvider.ID_SECOND_USER}; String[] result = syncMBean.syncUsers(userIds, false); assertResultMessages(result, Map.of(ID_TEST_USER, "nsa", TestIdentityProvider.ID_SECOND_USER, "nsa")); result = syncMBean.syncUsers(userIds, true); assertResultMessages(result, Map.of(ID_TEST_USER, "nsa", TestIdentityProvider.ID_SECOND_USER, "nsa")); } @Test public void testSyncUsers() throws Exception { sync(idp, ID_TEST_USER, false); String[] userIds = new String[]{ID_TEST_USER, TestIdentityProvider.ID_SECOND_USER}; String[] result = syncMBean.syncUsers(userIds, false); assertResultMessages(result, Map.of(ID_TEST_USER, "upd", TestIdentityProvider.ID_SECOND_USER, "nsa")); result = syncMBean.syncUsers(userIds, true); assertResultMessages(result, Map.of(ID_TEST_USER, "upd", TestIdentityProvider.ID_SECOND_USER, "nsa")); } @Test public void testSyncUsersAlwaysForcesSync() throws Exception { sync(idp, ID_TEST_USER, false); String[] userIds = new String[]{ID_TEST_USER, TestIdentityProvider.ID_SECOND_USER}; syncConfig.user().setExpirationTime(Long.MAX_VALUE); String[]result = syncMBean.syncUsers(userIds, false); assertResultMessages(result, Map.of(ID_TEST_USER, "upd", TestIdentityProvider.ID_SECOND_USER, "nsa")); } @Test public void testSyncGroups() throws Exception { sync(idp, "a", true); Map<String, String> expected = Map.of("a", "upd"); syncConfig.group().setExpirationTime(Long.MAX_VALUE); // force group sync is true by default => exp time is ignored String[] result = syncMBean.syncUsers(expected.keySet().toArray(new String[0]), false); assertResultMessages(result, expected); } @Test public void testSyncUsersPurge() throws Exception { sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), 
idp); sync(new TestIdentityProvider.TestGroup("gr", idp.getName()), idp); UserManager userManager = getUserManager(); Authorizable[] authorizables = new Authorizable[] { userManager.getAuthorizable("thirdUser"), userManager.getAuthorizable("gr") }; for (Authorizable a : authorizables) { String[] ids = new String[]{a.getID()}; String[] result = syncMBean.syncUsers(ids, false); assertResultMessages(result, a.getID(), "mis"); assertNotNull(userManager.getAuthorizable(a.getID())); result = syncMBean.syncUsers(ids, true); assertResultMessages(result, a.getID(), "del"); assertNull(getUserManager().getAuthorizable(a.getID())); } } @Test public void testSyncUsersNonExisting() { String[] result = syncMBean.syncUsers(new String[] {"nonExisting"}, false); assertResultMessages(result, "nonExisting", "nsa"); } @Test public void testSyncUsersLocal() { String[] result = syncMBean.syncUsers(new String[] {UserConstants.DEFAULT_ANONYMOUS_ID}, false); assertResultMessages(result, UserConstants.DEFAULT_ANONYMOUS_ID, "for"); } @Test public void testSyncUsersLocalPurge() throws Exception { String[] result = syncMBean.syncUsers(new String[] {UserConstants.DEFAULT_ANONYMOUS_ID}, true); assertResultMessages(result, UserConstants.DEFAULT_ANONYMOUS_ID, "for"); assertNotNull(getUserManager().getAuthorizable(UserConstants.DEFAULT_ANONYMOUS_ID)); } @Test public void testSyncUsersForeign() throws Exception { // sync user from foreign IDP into the repository SyncResult res = sync(foreignIDP, ID_TEST_USER, false); assertNotNull(getUserManager().getAuthorizable(ID_TEST_USER)); assertEquals(foreignIDP.getUser(ID_TEST_USER).getExternalId(), res.getIdentity().getExternalIdRef()); // syncUsers with testIDP must detect the foreign status String[] result = syncMBean.syncUsers(new String[]{ID_TEST_USER}, false); assertResultMessages(result, ID_TEST_USER, "for"); assertNotNull(getUserManager().getAuthorizable(ID_TEST_USER)); // same expected with 'purge' set to true result = syncMBean.syncUsers(new 
String[] {ID_TEST_USER}, true); assertResultMessages(result, ID_TEST_USER, "for"); assertNotNull(getUserManager().getAuthorizable(ID_TEST_USER)); } @Test public void testSyncGroupsForeign() throws Exception { // sync user from foreign IDP into the repository SyncResult res = sync(foreignIDP, "a", true); assertNotNull(getUserManager().getAuthorizable("a")); assertEquals(foreignIDP.getGroup("a").getExternalId(), res.getIdentity().getExternalIdRef()); // syncUsers with testIDP must detect the foreign status String[] result = syncMBean.syncUsers(new String[]{"a"}, false); assertResultMessages(result, "a", "for"); assertNotNull(getUserManager().getAuthorizable("a")); // same expected with 'purge' set to true result = syncMBean.syncUsers(new String[] {"a"}, true); assertResultMessages(result, "a", "for"); assertNotNull(getUserManager().getAuthorizable("a")); } /** * @see <a href="https://issues.apache.org/jira/browse/OAK-4360">OAK-4360</a> */ @Test public void testSyncUserException() throws Exception { User u = getUserManager().createUser(TestIdentityProvider.ID_EXCEPTION, null); u.setProperty(DefaultSyncContext.REP_EXTERNAL_ID, getValueFactory().createValue(new ExternalIdentityRef(TestIdentityProvider.ID_EXCEPTION, idp.getName()).getString())); root.commit(); String[] result = syncMBean.syncUsers(new String[]{TestIdentityProvider.ID_EXCEPTION}, false); assertResultMessages(result, TestIdentityProvider.ID_EXCEPTION, "ERR"); } @Test public void testSyncUserThrowingHandler() throws Exception { sync(idp, ID_TEST_USER, false); String[] result = createThrowingSyncMBean(false).syncUsers(new String[]{ID_TEST_USER}, false); assertResultMessages(result, ID_TEST_USER, "ERR"); } @Test public void testInitialSyncExternalUsers() throws Exception { ExternalUser externalUser = idp.getUser(ID_TEST_USER); String[] externalId = new String[] {externalUser.getExternalId().getString()}; String[] result = syncMBean.syncExternalUsers(externalId); assertResultMessages(result, ID_TEST_USER, 
"add"); UserManager userManager = getUserManager(); User testUser = userManager.getAuthorizable(externalUser.getId(), User.class); assertNotNull(testUser); for (ExternalIdentityRef groupRef : externalUser.getDeclaredGroups()) { assertNotNull(userManager.getAuthorizable(groupRef.getId())); } } @Test public void testInitialSyncExternalUsersNoNesting() throws Exception { syncConfig.user().setMembershipNestingDepth(-1); ExternalUser externalUser = idp.getUser(ID_TEST_USER); String[] externalId = new String[] {externalUser.getExternalId().getString()}; String[] result = syncMBean.syncExternalUsers(externalId); assertResultMessages(result, ID_TEST_USER, "add"); UserManager userManager = getUserManager(); User testUser = userManager.getAuthorizable(externalUser.getId(), User.class); assertNotNull(testUser); for (ExternalIdentityRef groupRef : externalUser.getDeclaredGroups()) { assertNull(userManager.getAuthorizable(groupRef.getId())); } } @Test public void testSyncExternalUsersLastSyncedProperty() throws Exception { ExternalUser externalUser = idp.getUser(ID_TEST_USER); String[] externalId = new String[]{externalUser.getExternalId().getString()}; syncMBean.syncExternalUsers(externalId); UserManager userManager = getUserManager(); User testUser = userManager.getAuthorizable(externalUser.getId(), User.class); long lastSynced = testUser.getProperty(DefaultSyncContext.REP_LAST_SYNCED)[0].getLong(); for (ExternalIdentityRef groupRef : externalUser.getDeclaredGroups()) { Group gr = userManager.getAuthorizable(groupRef.getId(), Group.class); long groupLastSynced = gr.getProperty(DefaultSyncContext.REP_LAST_SYNCED)[0].getLong(); assertEquals(lastSynced, groupLastSynced); } while (System.currentTimeMillis() <= lastSynced) { // wait for system time to move } // default value for forceGroup sync is defined to be 'true' => verify result syncMBean.syncExternalUsers(externalId); userManager = getUserManager(); testUser = userManager.getAuthorizable(externalUser.getId(), User.class); 
long lastSynced2 = testUser.getProperty(DefaultSyncContext.REP_LAST_SYNCED)[0].getLong(); assertTrue("lastSynced: " + lastSynced + ", lastSynced2: " + lastSynced2, lastSynced < lastSynced2); for (ExternalIdentityRef groupRef : externalUser.getDeclaredGroups()) { Group gr = userManager.getAuthorizable(groupRef.getId(), Group.class); long groupLastSynced = gr.getProperty(DefaultSyncContext.REP_LAST_SYNCED)[0].getLong(); assertEquals(lastSynced2, groupLastSynced); } } @Test public void testInitialSyncExternalGroup() throws Exception { ExternalGroup externalGroup = idp.getGroup("a"); String[] externalId = new String[] {externalGroup.getExternalId().getString()}; String[] result = syncMBean.syncExternalUsers(externalId); assertResultMessages(result, "a", "add"); UserManager userManager = getUserManager(); Group aGroup = userManager.getAuthorizable(externalGroup.getId(), Group.class); assertNotNull(aGroup); // membership of groups are not synced (unless imposed by user-sync with membership depth) for (ExternalIdentityRef groupRef : externalGroup.getDeclaredGroups()) { assertNull(userManager.getAuthorizable(groupRef.getId())); } } @Test public void testSyncExternalNonExisting() { ExternalIdentityRef ref = new ExternalIdentityRef("nonExisting", idp.getName()); String[] result = syncMBean.syncExternalUsers(new String[]{ref.getString()}); assertResultMessages(result, "", "nsi"); } /** * @see <a href="https://issues.apache.org/jira/browse/OAK-4346">OAK-4346</a> */ @Test public void testSyncExternalLocal() { ExternalIdentityRef ref = new ExternalIdentityRef(UserConstants.DEFAULT_ANONYMOUS_ID, null); String[] result = syncMBean.syncExternalUsers(new String[]{ref.getString()}); assertResultMessages(result, UserConstants.DEFAULT_ANONYMOUS_ID, "for"); } /** * @see <a href="https://issues.apache.org/jira/browse/OAK-4346">OAK-4346</a> */ @Test public void testSyncExternalForeign() { ExternalIdentityRef ref = new ExternalIdentityRef(ID_TEST_USER, "anotherIDP"); String[] result = 
syncMBean.syncExternalUsers(new String[]{ref.getString()}); assertResultMessages(result, ID_TEST_USER, "for"); result = syncMBean.syncExternalUsers(new String[] {ref.getString()}); assertResultMessages(result, ID_TEST_USER, "for"); } @Test public void testSyncExternalUserException() { ExternalIdentityRef ref = new ExternalIdentityRef(TestIdentityProvider.ID_EXCEPTION, idp.getName()); String[] result = syncMBean.syncExternalUsers(new String[] {ref.getString()}); assertResultMessages(result, TestIdentityProvider.ID_EXCEPTION, "ERR"); } @Test public void testSyncExternalUserThrowingHandler() { ExternalIdentityRef ref = new ExternalIdentityRef(ID_TEST_USER, idp.getName()); String[] result = createThrowingSyncMBean(false).syncExternalUsers(new String[]{ref.getString()}); assertResultMessages(result, ID_TEST_USER, "ERR"); } /** * test users have never been synced before => result must be empty */ @Test public void testSyncAllUsersBefore() { String[] result = syncMBean.syncAllUsers(false); assertEquals(0, result.length); } @Test public void testSyncAllUsers() throws Exception { // first sync external users into the repo syncMBean.syncAllExternalUsers(); // verify effect of syncAllUsers String[] result = syncMBean.syncAllUsers(false); Map<String, String> expected = getExpectedUserResult("upd", true); assertResultMessages(result, expected); UserManager userManager = getUserManager(); for (String id : expected.keySet()) { ExternalIdentity ei = idp.getUser(id); if (ei == null) { ei = idp.getGroup(id); } assertSync(ei, userManager); } } @Test public void testSyncAllGroups() throws Exception { // first sync external users into the repo Map<String, String> expected = new HashMap<>(); Iterator<ExternalGroup> grIt = idp.listGroups(); while (grIt.hasNext()) { ExternalGroup eg = grIt.next(); sync(idp, eg.getId(), true); expected.put(eg.getId(), "upd"); } // verify effect of syncAllUsers (which in this case are groups) String[] result = syncMBean.syncAllUsers(false); 
assertResultMessages(result, expected); UserManager userManager = getUserManager(); for (String id : expected.keySet()) { ExternalIdentity ei = idp.getGroup(id); assertSync(ei, userManager); } } @Test public void testSyncAllUsersPurgeFalse() throws Exception { // first sync external user|group into the repo that doesn't exist on the IDP (anymore) sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), idp); sync(new TestIdentityProvider.TestGroup("g", idp.getName()), idp); // syncAll with purge = false String[] result = syncMBean.syncAllUsers(false); assertResultMessages(result, Map.of("thirdUser", "mis", "g", "mis")); UserManager userManager = getUserManager(); assertNotNull(userManager.getAuthorizable("thirdUser")); assertNotNull(userManager.getAuthorizable("g")); } @Test public void testSyncAllUsersPurgeTrue() throws Exception { // first sync external user|group into the repo that doesn't exist on the IDP (anymore) sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), idp); sync(new TestIdentityProvider.TestGroup("g", idp.getName()), idp); // syncAll with purge = true String[] result = syncMBean.syncAllUsers(true); assertResultMessages(result, Map.of("thirdUser", "del", "g", "del")); UserManager userManager = getUserManager(); assertNull(userManager.getAuthorizable("thirdUser")); assertNull(userManager.getAuthorizable("g")); } @Test public void testSyncAllUsersForeign() throws Exception { // first sync external users + groups from 2 different IDPs into the repo // but set membership-nesting to 0 syncConfig.user().setMembershipNestingDepth(0); sync(idp, ID_TEST_USER, false); sync(idp, "a", true); sync(foreignIDP, TestIdentityProvider.ID_SECOND_USER, false); sync(foreignIDP, "aa", true); // verify effect of syncAllUsers : foreign user/group must be ignored by the sync. 
String[] result = syncMBean.syncAllUsers(false); Map<String, String> expectedResults = Map.of(ID_TEST_USER, "upd", "a", "upd"); assertResultMessages(result, expectedResults); ExternalIdentity[] expectedIds = new ExternalIdentity[] { idp.getUser(ID_TEST_USER), foreignIDP.getUser(TestIdentityProvider.ID_SECOND_USER), idp.getGroup("a"), foreignIDP.getGroup("aa") }; UserManager userManager = getUserManager(); for (ExternalIdentity externalIdentity : expectedIds) { assertSync(externalIdentity, userManager); } } @Test public void testSyncAllUsersException() throws Exception { User u = getUserManager().createUser(TestIdentityProvider.ID_EXCEPTION, null); u.setProperty(DefaultSyncContext.REP_EXTERNAL_ID, getValueFactory().createValue(new ExternalIdentityRef(TestIdentityProvider.ID_EXCEPTION, idp.getName()).getString())); root.commit(); String[] result = syncMBean.syncAllUsers(false); assertResultMessages(result, TestIdentityProvider.ID_EXCEPTION, "ERR"); result = syncMBean.syncAllUsers(true); assertResultMessages(result, TestIdentityProvider.ID_EXCEPTION, "ERR"); } @Test(expected = IllegalStateException.class) public void testSyncAllUsersThrowingHandler() { String[] result = createThrowingSyncMBean(false).syncAllUsers(false); } @Test public void testSyncAllUsersThrowingHandler2() throws Exception { syncMBean.syncAllExternalUsers(); Map<String, String> expected = getExpectedUserResult("ERR", true); String[] result = createThrowingSyncMBean(true).syncAllUsers(false); assertResultMessages(result, expected); } @Test public void testInitialSyncAllExternalUsers() throws Exception { String[] result = syncMBean.syncAllExternalUsers(); Map<String, String> expected = getExpectedUserResult("add", false); assertResultMessages(result, expected); UserManager userManager = getUserManager(); for (String id : expected.keySet()) { ExternalIdentity ei = idp.getUser(id); if (ei == null) { ei = idp.getGroup(id); } assertSync(ei, userManager); } } @Test public void 
testSyncAllExternalUsersAgain() throws Exception { syncMBean.syncAllExternalUsers(); // sync again String[] result = syncMBean.syncAllExternalUsers(); // verify result Map<String, String> expected = getExpectedUserResult("upd", false); assertResultMessages(result, expected); UserManager userManager = getUserManager(); for (String id : expected.keySet()) { ExternalIdentity ei = idp.getUser(id); if (ei == null) { ei = idp.getGroup(id); } assertSync(ei, userManager); } } @Test public void testSyncAllExternalUsersThrowingHandler() throws Exception { String[] result = createThrowingSyncMBean(false).syncAllExternalUsers(); Map<String, String> expected = getExpectedUserResult("ERR", false); assertResultMessages(result, expected); } @Test public void testListOrphanedUsers() throws Exception { syncMBean.syncAllExternalUsers(); String[] result = syncMBean.listOrphanedUsers(); assertEquals(0, result.length); sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), idp); sync(new TestIdentityProvider.TestGroup("g", idp.getName()), idp); result = syncMBean.listOrphanedUsers(); assertEquals(2, result.length); assertEquals(Set.of("thirdUser", "g"), Set.of(result)); } @Test public void testListOrphanedUsersForeign() throws Exception { sync(foreignIDP, "a", true); sync(foreignIDP, ID_TEST_USER, false); String[] result = syncMBean.listOrphanedUsers(); assertEquals(0, result.length); } @Test public void testListOrphanedUsersException () throws Exception { User u = getUserManager().createUser(TestIdentityProvider.ID_EXCEPTION, null); u.setProperty(DefaultSyncContext.REP_EXTERNAL_ID, getValueFactory().createValue(new ExternalIdentityRef(TestIdentityProvider.ID_EXCEPTION, idp.getName()).getString())); root.commit(); String[] result = syncMBean.listOrphanedUsers(); assertEquals(0, result.length); } @Test public void testListOrphanedUsersThrowingHandler() throws Exception { sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), idp); sync(new 
TestIdentityProvider.TestGroup("g", idp.getName()), idp); String[] result = createThrowingSyncMBean(false).listOrphanedUsers(); assertEquals(0, result.length); result = createThrowingSyncMBean(true).listOrphanedUsers(); assertEquals(2, result.length); assertEquals(Set.of("thirdUser", "g"), Set.of(result)); } @Test public void testPurgeOrphanedUsersNoPurge() { syncMBean.syncAllExternalUsers(); String[] result = syncMBean.purgeOrphanedUsers(); assertEquals(0, result.length); } @Test public void testPurgeOrphanedUsers() throws Exception { syncMBean.syncAllExternalUsers(); sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), idp); sync(new TestIdentityProvider.TestGroup("g", idp.getName()), idp); String[] result = syncMBean.purgeOrphanedUsers(); assertResultMessages(result, Map.of("thirdUser", "del", "g", "del")); UserManager userManager = getUserManager(); assertNull(userManager.getAuthorizable("thirdUser")); assertNull(userManager.getAuthorizable("g")); } @Test public void testPurgeOrphanedUsersForeign() throws Exception { sync(foreignIDP, "a", true); sync(foreignIDP, ID_TEST_USER, false); String[] result = syncMBean.purgeOrphanedUsers(); assertEquals(0, result.length); } @Test public void testPurgeOrphanedUsersException() throws Exception { User u = getUserManager().createUser(TestIdentityProvider.ID_EXCEPTION, null); u.setProperty(DefaultSyncContext.REP_EXTERNAL_ID, getValueFactory().createValue(new ExternalIdentityRef(TestIdentityProvider.ID_EXCEPTION, idp.getName()).getString())); root.commit(); String[] result = syncMBean.purgeOrphanedUsers(); assertEquals(0, result.length); } @Test public void testPurgeOrphanedUsersThrowingHandler() throws Exception { sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), idp); sync(new TestIdentityProvider.TestGroup("g", idp.getName()), idp); String[] result = createThrowingSyncMBean(false).purgeOrphanedUsers(); assertEquals(0, result.length); UserManager userManager = getUserManager(); 
assertNotNull(userManager.getAuthorizable("thirdUser")); assertNotNull(userManager.getAuthorizable("g")); } /** * @see <a href="https://issues.apache.org/jira/browse/OAK-4362">OAK-4362</a> */ @Test public void testPurgeOrphanedUsersThrowingHandler2() throws Exception { sync(new TestIdentityProvider.TestUser("thirdUser", idp.getName()), idp); sync(new TestIdentityProvider.TestGroup("g", idp.getName()), idp); String[] result = createThrowingSyncMBean(true).purgeOrphanedUsers(); assertResultMessages(result, Map.of("thirdUser", "ERR", "g", "ERR")); UserManager userManager = getUserManager(); assertNotNull(userManager.getAuthorizable("thirdUser")); assertNotNull(userManager.getAuthorizable("g")); } @Test public void testConvertToDynamicMembershipDefaultContext() throws Exception { sync(new TestIdentityProvider.TestUser(ID_TEST_USER, idp.getName()), idp); String[] result = syncMBean.convertToDynamicMembership(); assertNotNull(result); assertEquals(0, result.length); } @Test public void testConvertToDynamicMembership() throws Exception { syncConfig.user().setMembershipNestingDepth(2); sync(idp.getUser(ID_TEST_USER), idp); syncConfig.user().setDynamicMembership(true); String[] result = syncMBean.convertToDynamicMembership(); assertNotNull(result); assertEquals(1, result.length); ResultMessages expected = new ResultMessages(); expected.append(Collections.singletonList(new DefaultSyncResultImpl(DefaultSyncContext.createSyncedIdentity(getUserManager().getAuthorizable(ID_TEST_USER)), SyncResult.Status.UPDATE))); assertArrayEquals(expected.getMessages(), result); } @Test public void testConvertToDynamicMembershipNoop() throws Exception { syncConfig.user().setDynamicMembership(true); syncMBean.syncExternalUsers(new String[] {idp.getUser(ID_TEST_USER).getExternalId().getString()}); Authorizable a = getUserManager().getAuthorizable(ID_TEST_USER); assertNotNull(a); String[] result = syncMBean.convertToDynamicMembership(); assertNotNull(result); assertEquals(1, result.length); 
ResultMessages expected = new ResultMessages(); expected.append(Collections.singletonList(new DefaultSyncResultImpl(DefaultSyncContext.createSyncedIdentity(a), SyncResult.Status.NOP))); assertArrayEquals(expected.getMessages(), result); } @Test public void testConvertToDynamicMembershipForeign() throws Exception { String idpName = "anotherIDP"; TestIdentityProvider anotherIdp = new TestIdentityProvider(idpName); sync(anotherIdp.getUser(ID_TEST_USER), anotherIdp); syncConfig.user().setDynamicMembership(true); Authorizable a = getUserManager().getAuthorizable(ID_TEST_USER); assertNotNull(a); String[] result = syncMBean.convertToDynamicMembership(); assertNotNull(result); assertEquals(0, result.length); } /** * SyncHandler implementation that throws Exceptions. */ private final class ThrowingSyncHandler extends DefaultSyncHandler { private static final String NAME = "throwing"; private static final String NAME_ALLOWS_IDENTITY_LISTING = "throwingExceptListIdentities"; boolean allowsListIdentities; private ThrowingSyncHandler(boolean allowsListIdentities) { super(syncConfig); this.allowsListIdentities = allowsListIdentities; } @NotNull @Override public String getName() { return allowsListIdentities ? 
NAME_ALLOWS_IDENTITY_LISTING : NAME; } @NotNull @Override public SyncContext createContext(@NotNull ExternalIdentityProvider idp, @NotNull UserManager userManager, @NotNull ValueFactory valueFactory) { return new DefaultSyncContext(syncConfig, idp, userManager, valueFactory) { @NotNull @Override public SyncResult sync(@NotNull ExternalIdentity identity) throws SyncException { throw new SyncException("sync " + identity); } @NotNull @Override public SyncResult sync(@NotNull String id) throws SyncException { throw new SyncException("sync " + id); } }; } @Nullable @Override public SyncedIdentity findIdentity(@NotNull UserManager userManager, @NotNull String id) throws RepositoryException { throw new RepositoryException("findIdentity"); } @Override public boolean requiresSync(@NotNull SyncedIdentity identity) { return false; } @NotNull @Override public Iterator<SyncedIdentity> listIdentities(@NotNull UserManager userManager) throws RepositoryException { if (!allowsListIdentities) { throw new RepositoryException("listIdentities"); } else { return super.listIdentities(userManager); } } } }
apache/directory-studio
34,859
plugins/schemaeditor/src/test/java/org/apache/directory/studio/schemaeditor/model/difference/DifferenceEngineTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.directory.studio.schemaeditor.model.difference;


import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Arrays;
import java.util.List;

import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.ObjectClass;
import org.apache.directory.api.ldap.model.schema.ObjectClassTypeEnum;
import org.apache.directory.api.ldap.model.schema.UsageEnum;
import org.junit.jupiter.api.Test;


/**
 * This class tests the DifferenceEngine class.
 * <p>
 * Each test builds two schema objects sharing the same OID, varies exactly one
 * property between them, and verifies that the engine reports a single
 * difference of the expected kind (ADDED, MODIFIED or REMOVED) carrying the
 * expected old/new values.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public class DifferenceEngineTest
{
    /**
     * Asserts that the given list contains exactly one difference, that it is an
     * instance of the expected difference class and flagged with the expected
     * difference type, and returns it so callers can assert its old/new values.
     *
     * @param differences the differences computed by the engine
     * @param expectedClass the expected concrete difference class
     * @param expectedType the expected difference type (ADDED, MODIFIED, REMOVED)
     * @return the single difference, for further value assertions
     */
    private static PropertyDifference assertSingleDifference( List<PropertyDifference> differences,
        Class<? extends Difference> expectedClass, DifferenceType expectedType )
    {
        assertEquals( 1, differences.size() );

        PropertyDifference difference = differences.get( 0 );
        // assertTrue/assertEquals (rather than a bare fail()) give a diagnostic
        // message identifying the actual class and type on failure.
        assertTrue( expectedClass.isInstance( difference ),
            "Expected " + expectedClass.getSimpleName() + " but was " + difference.getClass().getSimpleName() );
        assertEquals( expectedType, difference.getType() );

        return difference;
    }


    /**
     * Tests the AddAliasDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddAliasDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setNames( new String[]
            { "alias" } ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            AliasDifference.class, DifferenceType.ADDED );
        assertEquals( "alias", ( ( AliasDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddDescriptionDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddDescriptionDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setDescription( "Description" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            DescriptionDifference.class, DifferenceType.ADDED );
        assertEquals( "Description", ( ( DescriptionDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddEqualityDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddEqualityDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setEqualityOid( "Equality" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            EqualityDifference.class, DifferenceType.ADDED );
        assertEquals( "Equality", ( ( EqualityDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddMandatoryATDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddMandatoryATDifference() throws Exception
    {
        ObjectClass o1 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        ObjectClass o2 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o2.setMustAttributeTypeOids( Arrays.asList( "must" ) ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            MandatoryATDifference.class, DifferenceType.ADDED );
        assertEquals( "must", ( ( MandatoryATDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddOptionalATDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddOptionalATDifference() throws Exception
    {
        ObjectClass o1 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        ObjectClass o2 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o2.setMayAttributeTypeOids( Arrays.asList( "may" ) ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            OptionalATDifference.class, DifferenceType.ADDED );
        assertEquals( "may", ( ( OptionalATDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddOrderingDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddOrderingDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setOrderingOid( "Ordering" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            OrderingDifference.class, DifferenceType.ADDED );
        assertEquals( "Ordering", ( ( OrderingDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddSubstringDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddSubstringDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSubstringOid( "Substring" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SubstringDifference.class, DifferenceType.ADDED );
        assertEquals( "Substring", ( ( SubstringDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddSuperiorATDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddSuperiorATDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSuperiorOid( "superiorAT" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SuperiorATDifference.class, DifferenceType.ADDED );
        assertEquals( "superiorAT", ( ( SuperiorATDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddSuperiorOCDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddSuperiorOCDifference() throws Exception
    {
        ObjectClass o1 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        ObjectClass o2 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSuperiorOids( Arrays.asList( "superiorOC" ) ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SuperiorOCDifference.class, DifferenceType.ADDED );
        assertEquals( "superiorOC", ( ( SuperiorOCDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddSyntaxDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddSyntaxDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSyntaxOid( "1.2.3.4.5" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SyntaxDifference.class, DifferenceType.ADDED );
        assertEquals( "1.2.3.4.5", ( ( SyntaxDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the AddSyntaxLengthDifference.
     *
     * @throws Exception
     */
    @Test
    public void testAddSyntaxLengthDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSyntaxLength( 1234 );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SyntaxLengthDifference.class, DifferenceType.ADDED );
        // syntax lengths are reported as Long values
        assertEquals( 1234L, ( ( SyntaxLengthDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the ModifyClassTypeDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyClassTypeDifference() throws Exception
    {
        ObjectClass o1 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o1.setType( ObjectClassTypeEnum.STRUCTURAL );
        ObjectClass o2 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o2.setType( ObjectClassTypeEnum.ABSTRACT );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            ClassTypeDifference.class, DifferenceType.MODIFIED );
        assertEquals( ObjectClassTypeEnum.STRUCTURAL, ( ( ClassTypeDifference ) difference ).getOldValue() );
        assertEquals( ObjectClassTypeEnum.ABSTRACT, ( ( ClassTypeDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the ModifyCollectiveDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyCollectiveDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setCollective( true );
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setCollective( false );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            CollectiveDifference.class, DifferenceType.MODIFIED );
        assertEquals( true, ( ( CollectiveDifference ) difference ).getOldValue() );
        assertEquals( false, ( ( CollectiveDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the ModifyDescriptionDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyDescriptionDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setDescription( "Description" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setDescription( "New Description" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            DescriptionDifference.class, DifferenceType.MODIFIED );
        assertEquals( "Description", ( ( DescriptionDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertEquals( "New Description", ( ( DescriptionDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the ModifyEqualityDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyEqualityDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setEqualityOid( "equalityName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setEqualityOid( "newEqualityName" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            EqualityDifference.class, DifferenceType.MODIFIED );
        assertEquals( "equalityName", ( ( EqualityDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertEquals( "newEqualityName", ( ( EqualityDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the ModifyNoUserModificationDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyNoUserModificationDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setUserModifiable( true );
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setUserModifiable( false );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            NoUserModificationDifference.class, DifferenceType.MODIFIED );
        assertEquals( true, ( ( NoUserModificationDifference ) difference ).getOldValue() );
        assertEquals( false, ( ( NoUserModificationDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the ModifyObsoleteDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyObsoleteDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setObsolete( true );
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setObsolete( false );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            ObsoleteDifference.class, DifferenceType.MODIFIED );
        assertEquals( true, ( ( ObsoleteDifference ) difference ).getOldValue() );
        assertEquals( false, ( ( ObsoleteDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the ModifyOrderingDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyOrderingDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setOrderingOid( "orderingName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setOrderingOid( "newOrderingName" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            OrderingDifference.class, DifferenceType.MODIFIED );
        assertEquals( "orderingName", ( ( OrderingDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertEquals( "newOrderingName", ( ( OrderingDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the ModifySingleValueDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifySingleValueDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSingleValued( true );
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSingleValued( false );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SingleValueDifference.class, DifferenceType.MODIFIED );
        assertEquals( true, ( ( SingleValueDifference ) difference ).getOldValue() );
        assertEquals( false, ( ( SingleValueDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the ModifySubstringDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifySubstringDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSubstringOid( "substrName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSubstringOid( "newSubstrName" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SubstringDifference.class, DifferenceType.MODIFIED );
        assertEquals( "substrName", ( ( SubstringDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertEquals( "newSubstrName", ( ( SubstringDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the ModifySuperiorATDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifySuperiorATDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSuperiorOid( "superiorName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSuperiorOid( "newSuperiorName" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SuperiorATDifference.class, DifferenceType.MODIFIED );
        assertEquals( "superiorName", ( ( SuperiorATDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertEquals( "newSuperiorName", ( ( SuperiorATDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the ModifySyntaxDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifySyntaxDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSyntaxOid( "1.2.3.4.5" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSyntaxOid( "1.2.3.4.6" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SyntaxDifference.class, DifferenceType.MODIFIED );
        assertEquals( "1.2.3.4.5", ( ( SyntaxDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertEquals( "1.2.3.4.6", ( ( SyntaxDifference ) difference ).getNewValue() ); //$NON-NLS-1$
    }


    /**
     * Tests the ModifySyntaxLengthDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifySyntaxLengthDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSyntaxLength( 1234 );
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSyntaxLength( 12345 );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SyntaxLengthDifference.class, DifferenceType.MODIFIED );
        assertEquals( 1234L, ( ( SyntaxLengthDifference ) difference ).getOldValue() );
        assertEquals( 12345L, ( ( SyntaxLengthDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the ModifyUsageDifference.
     *
     * @throws Exception
     */
    @Test
    public void testModifyUsageDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setUsage( UsageEnum.DISTRIBUTED_OPERATION );
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setUsage( UsageEnum.DIRECTORY_OPERATION );

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            UsageDifference.class, DifferenceType.MODIFIED );
        assertEquals( UsageEnum.DISTRIBUTED_OPERATION, ( ( UsageDifference ) difference ).getOldValue() );
        assertEquals( UsageEnum.DIRECTORY_OPERATION, ( ( UsageDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveAliasDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveAliasDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setNames( new String[]
            { "name1", "name2" } ); //$NON-NLS-1$ //$NON-NLS-2$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o2.setNames( new String[]
            { "name2" } ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            AliasDifference.class, DifferenceType.REMOVED );
        assertEquals( "name1", ( ( AliasDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( AliasDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveDescriptionDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveDescriptionDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setDescription( "Description" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            DescriptionDifference.class, DifferenceType.REMOVED );
        assertEquals( "Description", ( ( DescriptionDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( DescriptionDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveEqualityDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveEqualityDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setEqualityOid( "equalityName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            EqualityDifference.class, DifferenceType.REMOVED );
        assertEquals( "equalityName", ( ( EqualityDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( EqualityDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveMandatoryATDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveMandatoryATDifference() throws Exception
    {
        ObjectClass o1 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o1.setMustAttributeTypeOids( Arrays.asList( "must1", "must2" ) ); //$NON-NLS-1$ //$NON-NLS-2$
        ObjectClass o2 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o2.setMustAttributeTypeOids( Arrays.asList( "must2" ) ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            MandatoryATDifference.class, DifferenceType.REMOVED );
        assertEquals( "must1", ( ( MandatoryATDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( MandatoryATDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveOptionalATDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveOptionalATDifference() throws Exception
    {
        ObjectClass o1 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o1.setMayAttributeTypeOids( Arrays.asList( "may1", "may2" ) ); //$NON-NLS-1$ //$NON-NLS-2$
        ObjectClass o2 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o2.setMayAttributeTypeOids( Arrays.asList( "may2" ) ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            OptionalATDifference.class, DifferenceType.REMOVED );
        assertEquals( "may1", ( ( OptionalATDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( OptionalATDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveOrderingDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveOrderingDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setOrderingOid( "orderingName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            OrderingDifference.class, DifferenceType.REMOVED );
        assertEquals( "orderingName", ( ( OrderingDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( OrderingDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveSubstringDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveSubstringDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSubstringOid( "substrName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SubstringDifference.class, DifferenceType.REMOVED );
        assertEquals( "substrName", ( ( SubstringDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( SubstringDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveSuperiorATDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveSuperiorATDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSuperiorOid( "superiorName" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SuperiorATDifference.class, DifferenceType.REMOVED );
        assertEquals( "superiorName", ( ( SuperiorATDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( SuperiorATDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveSuperiorOCDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveSuperiorOCDifference() throws Exception
    {
        ObjectClass o1 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSuperiorOids( Arrays.asList( "sup1", "sup2" ) ); //$NON-NLS-1$ //$NON-NLS-2$
        ObjectClass o2 = new ObjectClass( "1.2.3.4" ); //$NON-NLS-1$
        o2.setSuperiorOids( Arrays.asList( "sup2" ) ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SuperiorOCDifference.class, DifferenceType.REMOVED );
        assertEquals( "sup1", ( ( SuperiorOCDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( SuperiorOCDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveSyntaxDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveSyntaxDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSyntaxOid( "1.2.3.4.5" ); //$NON-NLS-1$
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SyntaxDifference.class, DifferenceType.REMOVED );
        assertEquals( "1.2.3.4.5", ( ( SyntaxDifference ) difference ).getOldValue() ); //$NON-NLS-1$
        assertNull( ( ( SyntaxDifference ) difference ).getNewValue() );
    }


    /**
     * Tests the RemoveSyntaxLengthDifference.
     *
     * @throws Exception
     */
    @Test
    public void testRemoveSyntaxLengthDifference() throws Exception
    {
        AttributeType o1 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$
        o1.setSyntaxLength( 1234 );
        AttributeType o2 = new AttributeType( "1.2.3.4" ); //$NON-NLS-1$

        PropertyDifference difference = assertSingleDifference( DifferenceEngine.getDifferences( o1, o2 ),
            SyntaxLengthDifference.class, DifferenceType.REMOVED );
        assertEquals( 1234L, ( ( SyntaxLengthDifference ) difference ).getOldValue() );
        assertNull( ( ( SyntaxLengthDifference ) difference ).getNewValue() );
    }
}
googleapis/google-cloud-java
35,018
java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/ListTagsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/artifactregistry/v1/tag.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.artifactregistry.v1; /** * * * <pre> * The response from listing tags. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1.ListTagsResponse} */ public final class ListTagsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ListTagsResponse) ListTagsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListTagsResponse.newBuilder() to construct. 
private ListTagsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListTagsResponse() { tags_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListTagsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1.TagProto .internal_static_google_devtools_artifactregistry_v1_ListTagsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1.TagProto .internal_static_google_devtools_artifactregistry_v1_ListTagsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1.ListTagsResponse.class, com.google.devtools.artifactregistry.v1.ListTagsResponse.Builder.class); } public static final int TAGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.devtools.artifactregistry.v1.Tag> tags_; /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ @java.lang.Override public java.util.List<com.google.devtools.artifactregistry.v1.Tag> getTagsList() { return tags_; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.devtools.artifactregistry.v1.TagOrBuilder> getTagsOrBuilderList() { return tags_; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ @java.lang.Override public int getTagsCount() { return tags_.size(); } /** * * * <pre> * The tags returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1.Tag getTags(int index) { return tags_.get(index); } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1.TagOrBuilder getTagsOrBuilder(int index) { return tags_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to retrieve the next page of tags, or empty if there are no * more tags to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The token to retrieve the next page of tags, or empty if there are no * more tags to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < tags_.size(); i++) { output.writeMessage(1, tags_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tags_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tags_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.devtools.artifactregistry.v1.ListTagsResponse)) { return super.equals(obj); } com.google.devtools.artifactregistry.v1.ListTagsResponse other = (com.google.devtools.artifactregistry.v1.ListTagsResponse) obj; if (!getTagsList().equals(other.getTagsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTagsCount() > 0) { hash = (37 * hash) + TAGS_FIELD_NUMBER; hash = (53 * hash) + getTagsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.artifactregistry.v1.ListTagsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response from listing tags. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1.ListTagsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ListTagsResponse) com.google.devtools.artifactregistry.v1.ListTagsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1.TagProto .internal_static_google_devtools_artifactregistry_v1_ListTagsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1.TagProto .internal_static_google_devtools_artifactregistry_v1_ListTagsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1.ListTagsResponse.class, com.google.devtools.artifactregistry.v1.ListTagsResponse.Builder.class); } // Construct using com.google.devtools.artifactregistry.v1.ListTagsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (tagsBuilder_ == null) { tags_ = java.util.Collections.emptyList(); } else { tags_ = null; tagsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.devtools.artifactregistry.v1.TagProto 
.internal_static_google_devtools_artifactregistry_v1_ListTagsResponse_descriptor; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ListTagsResponse getDefaultInstanceForType() { return com.google.devtools.artifactregistry.v1.ListTagsResponse.getDefaultInstance(); } @java.lang.Override public com.google.devtools.artifactregistry.v1.ListTagsResponse build() { com.google.devtools.artifactregistry.v1.ListTagsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ListTagsResponse buildPartial() { com.google.devtools.artifactregistry.v1.ListTagsResponse result = new com.google.devtools.artifactregistry.v1.ListTagsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.devtools.artifactregistry.v1.ListTagsResponse result) { if (tagsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tags_ = java.util.Collections.unmodifiableList(tags_); bitField0_ = (bitField0_ & ~0x00000001); } result.tags_ = tags_; } else { result.tags_ = tagsBuilder_.build(); } } private void buildPartial0(com.google.devtools.artifactregistry.v1.ListTagsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override 
public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.artifactregistry.v1.ListTagsResponse) { return mergeFrom((com.google.devtools.artifactregistry.v1.ListTagsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.devtools.artifactregistry.v1.ListTagsResponse other) { if (other == com.google.devtools.artifactregistry.v1.ListTagsResponse.getDefaultInstance()) return this; if (tagsBuilder_ == null) { if (!other.tags_.isEmpty()) { if (tags_.isEmpty()) { tags_ = other.tags_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTagsIsMutable(); tags_.addAll(other.tags_); } onChanged(); } } else { if (!other.tags_.isEmpty()) { if (tagsBuilder_.isEmpty()) { tagsBuilder_.dispose(); tagsBuilder_ = null; tags_ = other.tags_; bitField0_ = (bitField0_ & ~0x00000001); tagsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTagsFieldBuilder() : null; } else { tagsBuilder_.addAllMessages(other.tags_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.devtools.artifactregistry.v1.Tag m = input.readMessage( com.google.devtools.artifactregistry.v1.Tag.parser(), extensionRegistry); if (tagsBuilder_ == null) { ensureTagsIsMutable(); tags_.add(m); } else { tagsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.devtools.artifactregistry.v1.Tag> tags_ = java.util.Collections.emptyList(); private void ensureTagsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { tags_ = new java.util.ArrayList<com.google.devtools.artifactregistry.v1.Tag>(tags_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1.Tag, com.google.devtools.artifactregistry.v1.Tag.Builder, com.google.devtools.artifactregistry.v1.TagOrBuilder> tagsBuilder_; /** * * * <pre> * 
The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public java.util.List<com.google.devtools.artifactregistry.v1.Tag> getTagsList() { if (tagsBuilder_ == null) { return java.util.Collections.unmodifiableList(tags_); } else { return tagsBuilder_.getMessageList(); } } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public int getTagsCount() { if (tagsBuilder_ == null) { return tags_.size(); } else { return tagsBuilder_.getCount(); } } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public com.google.devtools.artifactregistry.v1.Tag getTags(int index) { if (tagsBuilder_ == null) { return tags_.get(index); } else { return tagsBuilder_.getMessage(index); } } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder setTags(int index, com.google.devtools.artifactregistry.v1.Tag value) { if (tagsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTagsIsMutable(); tags_.set(index, value); onChanged(); } else { tagsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder setTags( int index, com.google.devtools.artifactregistry.v1.Tag.Builder builderForValue) { if (tagsBuilder_ == null) { ensureTagsIsMutable(); tags_.set(index, builderForValue.build()); onChanged(); } else { tagsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The tags returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder addTags(com.google.devtools.artifactregistry.v1.Tag value) { if (tagsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTagsIsMutable(); tags_.add(value); onChanged(); } else { tagsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder addTags(int index, com.google.devtools.artifactregistry.v1.Tag value) { if (tagsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTagsIsMutable(); tags_.add(index, value); onChanged(); } else { tagsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder addTags(com.google.devtools.artifactregistry.v1.Tag.Builder builderForValue) { if (tagsBuilder_ == null) { ensureTagsIsMutable(); tags_.add(builderForValue.build()); onChanged(); } else { tagsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder addTags( int index, com.google.devtools.artifactregistry.v1.Tag.Builder builderForValue) { if (tagsBuilder_ == null) { ensureTagsIsMutable(); tags_.add(index, builderForValue.build()); onChanged(); } else { tagsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder addAllTags( java.lang.Iterable<? 
extends com.google.devtools.artifactregistry.v1.Tag> values) { if (tagsBuilder_ == null) { ensureTagsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tags_); onChanged(); } else { tagsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder clearTags() { if (tagsBuilder_ == null) { tags_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tagsBuilder_.clear(); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public Builder removeTags(int index) { if (tagsBuilder_ == null) { ensureTagsIsMutable(); tags_.remove(index); onChanged(); } else { tagsBuilder_.remove(index); } return this; } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public com.google.devtools.artifactregistry.v1.Tag.Builder getTagsBuilder(int index) { return getTagsFieldBuilder().getBuilder(index); } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public com.google.devtools.artifactregistry.v1.TagOrBuilder getTagsOrBuilder(int index) { if (tagsBuilder_ == null) { return tags_.get(index); } else { return tagsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public java.util.List<? extends com.google.devtools.artifactregistry.v1.TagOrBuilder> getTagsOrBuilderList() { if (tagsBuilder_ != null) { return tagsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tags_); } } /** * * * <pre> * The tags returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public com.google.devtools.artifactregistry.v1.Tag.Builder addTagsBuilder() { return getTagsFieldBuilder() .addBuilder(com.google.devtools.artifactregistry.v1.Tag.getDefaultInstance()); } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public com.google.devtools.artifactregistry.v1.Tag.Builder addTagsBuilder(int index) { return getTagsFieldBuilder() .addBuilder(index, com.google.devtools.artifactregistry.v1.Tag.getDefaultInstance()); } /** * * * <pre> * The tags returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1.Tag tags = 1;</code> */ public java.util.List<com.google.devtools.artifactregistry.v1.Tag.Builder> getTagsBuilderList() { return getTagsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1.Tag, com.google.devtools.artifactregistry.v1.Tag.Builder, com.google.devtools.artifactregistry.v1.TagOrBuilder> getTagsFieldBuilder() { if (tagsBuilder_ == null) { tagsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1.Tag, com.google.devtools.artifactregistry.v1.Tag.Builder, com.google.devtools.artifactregistry.v1.TagOrBuilder>( tags_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); tags_ = null; } return tagsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to retrieve the next page of tags, or empty if there are no * more tags to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token to retrieve the next page of tags, or empty if there are no * more tags to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token to retrieve the next page of tags, or empty if there are no * more tags to return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The token to retrieve the next page of tags, or empty if there are no * more tags to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The token to retrieve the next page of tags, or empty if there are no * more tags to return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ListTagsResponse) } // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ListTagsResponse) private static final com.google.devtools.artifactregistry.v1.ListTagsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ListTagsResponse(); } public static com.google.devtools.artifactregistry.v1.ListTagsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListTagsResponse> PARSER = new com.google.protobuf.AbstractParser<ListTagsResponse>() { @java.lang.Override public ListTagsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListTagsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListTagsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ListTagsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/closure-templates
34,893
java/src/com/google/template/soy/javagencode/GenerateBuildersVisitor.java
/* * Copyright 2019 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.template.soy.javagencode; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.template.soy.base.SourceLocation.UNKNOWN; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.ADD_TO_LIST_PARAM; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.AS_RECORD; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.INDIRECT_P; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.INIT_LIST_PARAM; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.INJECTED_P; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.SET_PARAM_INTERNAL; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.STANDARD_P; import static com.google.template.soy.javagencode.javatypes.CodeGenUtils.maybeAddNullableToClass; import static com.google.template.soy.shared.internal.gencode.JavaGenerationUtils.appendFunctionCallWithParamsOnNewLines; import static com.google.template.soy.shared.internal.gencode.JavaGenerationUtils.appendJavadoc; import static com.google.template.soy.shared.internal.gencode.JavaGenerationUtils.isReservedKeyword; import static com.google.template.soy.shared.internal.gencode.JavaGenerationUtils.makeLowerCamelCase; import static com.google.template.soy.shared.internal.gencode.JavaGenerationUtils.makeUpperCamelCase; 
import static java.util.stream.Collectors.counting; import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.template.soy.base.SourceLocation.ByteSpan; import com.google.template.soy.base.internal.KytheMode; import com.google.template.soy.error.ErrorReporter; import com.google.template.soy.error.SoyErrorKind; import com.google.template.soy.javagencode.SoyFileNodeTransformer.FileInfo; import com.google.template.soy.javagencode.SoyFileNodeTransformer.ParamInfo; import com.google.template.soy.javagencode.SoyFileNodeTransformer.ParamStatus; import com.google.template.soy.javagencode.SoyFileNodeTransformer.TemplateInfo; import com.google.template.soy.javagencode.javatypes.CodeGenUtils; import com.google.template.soy.javagencode.javatypes.CollectionJavaType; import com.google.template.soy.javagencode.javatypes.FutureJavaType; import com.google.template.soy.javagencode.javatypes.JavaType; import com.google.template.soy.javagencode.javatypes.RecordJavaType; import com.google.template.soy.shared.internal.gencode.GeneratedFile; import com.google.template.soy.shared.internal.gencode.IndentedLinesBuilder; import com.google.template.soy.soytree.AbstractSoyNodeVisitor; import com.google.template.soy.soytree.FileSetMetadata; import com.google.template.soy.soytree.SoyFileNode; import com.google.template.soy.soytree.SoyFileSetNode; import com.google.template.soy.soytree.SoyNode; import com.google.template.soy.soytree.SoyTreeUtils; import com.google.template.soy.soytree.TemplateNode; import com.google.template.soy.soytree.defn.TemplateHeaderVarDefn; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; /** * Visitor for generating Java template parameter builders (see {@link * 
com.google.template.soy.data.BaseSoyTemplateImpl}) that can be used for invoking Soy templates
 * from Java. One java file will be generated for each soy file, containing template param builders
 * for each template in the soy file.
 *
 * <p>For example, "foo.soy" containing templates "bar" and "baz" would result in FooTemplates.java,
 * with inner classes Bar and Baz.
 */
public final class GenerateBuildersVisitor
    extends AbstractSoyNodeVisitor<ImmutableList<GeneratedFile>> {

  // Names of private fields emitted into every generated template class.
  private static final String TEMPLATE_NAME_FIELD = "__NAME__";
  private static final String PARAMS_FIELD = "__PARAMS__";
  private static final String DEFAULT_INSTANCE_FIELD = "__DEFAULT_INSTANCE__";

  private static final SoyErrorKind TYPE_COLLISION =
      SoyErrorKind.of(
          "Parameter ''{0}'' in {1} has different types in different templates. No parameter"
              + " setter generated.");
  private static final SoyErrorKind INDIRECT_PROTO =
      SoyErrorKind.of(
          "Parameter ''{0}'' in {1} depends on a proto or proto enum that is not a direct"
              + " dependency of this library. No parameter setter generated.");
  private static final SoyErrorKind TEMPLATE_NAME_COLLISION =
      SoyErrorKind.of(
          "When generating Soy Java Template Builders, the template: {0} generated the same Java"
              + " UpperCamelCase name as another template in this file, or collided with a"
              + " reserved identifier: "
              + SoyFileNodeTransformer.RESERVED_IDENTIFIERS
              + ". This template was skipped during Soy java_builders generation. To use this API,"
              + " all Soy template names in a given file should be unique when converted to"
              + " UpperCamelCase (with non-alphanumeric characters stripped). The generated Java"
              + " class name was: {1}.");
  private static final SoyErrorKind PARAM_NAME_COLLISION =
      SoyErrorKind.of(
          "When generating Soy Java Template Builders, the param named {0} in template {1}"
              + " generated the same UpperCamelCase name as another parameter, or collided with"
              + " a reserved identifier: "
              + SoyFileNodeTransformer.RESERVED_IDENTIFIERS
              + ". Param: {0} is being skipped (no setters will be generated for this param). The"
              + " generated setter name was: {2}. To use this API, all parameter names for a given"
              + " template should be unique when converted to UpperCamelCase (with"
              + " non-alphanumeric characters stripped).");
  // NOTE(review): the concatenation below yields "left--to-right" (double hyphen) in the emitted
  // warning text — looks like a typo for "left-to-right"; confirm intended message before fixing,
  // since tests may assert on the exact string.
  private static final SoyErrorKind SETTER_SIGNATURE_COLLISION =
      SoyErrorKind.of(
          "When generating Soy Java Template Builders, the param named {0} in template {1}"
              + " generated a setter with the same signature as another setter for the same param: "
              + "`{2}`. This can happen with unions since one setter for each union member is "
              + "generated. In case of collisions, a setter for the first type, when read left-"
              + "-to-right, is emitted.");
  private static final SoyErrorKind FILE_NAME_COLLISION =
      SoyErrorKind.of(
          "While generating Soy Java invocation builders, multiple files in this soy fileset"
              + " mapped to the same file name: {0}. To use this api, soy file names should be"
              + " unique when converted to UpperCamelCase (with non-alpha-numeric characters"
              + " stripped).");
  private static final SoyErrorKind FUTURE_NAME_COLLISION =
      SoyErrorKind.of(
          "Achievement unlocked. You have a template with parameters named {0} and"
              + " {0}Future, preventing a future setter from being created for the first"
              + " parameter.");

  private final ErrorReporter errorReporter;
  private final KytheMode kytheMode;
  private final SoyFileNodeTransformer transformer;

  // Per-file mutable state, reset in visitSoyFileNode / exec.
  private IndentedLinesBuilder ilb; // Line formatter for the generated code.
  private KytheHelper kytheHelper;
  private ImmutableList.Builder<GeneratedFile> generatedFiles; // The generated Java files to write.

  public GenerateBuildersVisitor(
      ErrorReporter errorReporter,
      String javaPackage,
      KytheMode kytheMode,
      FileSetMetadata registry) {
    this.errorReporter = errorReporter;
    this.kytheMode = kytheMode;
    this.transformer = new SoyFileNodeTransformer(javaPackage, registry);
  }

  /**
   * Visits the given node (typically a {@link SoyFileSetNode}) and returns one generated Java file
   * per Soy file. Warns (via the {@link ErrorReporter}) if two Soy files map to the same Java file
   * name.
   */
  @Override
  public ImmutableList<GeneratedFile> exec(SoyNode node) {
    // Reset per-run state so the visitor instance can be reused.
    generatedFiles = new ImmutableList.Builder<>();
    ilb = null;
    kytheHelper = null;
    visit(node);
    ImmutableList<GeneratedFile> builtFileList = generatedFiles.build();
    logWarningIfFilenamesNotUnique(builtFileList);
    return builtFileList;
  }

  @Override
  protected void visitSoyFileSetNode(SoyFileSetNode node) {
    for (SoyFileNode soyFile : node.getChildren()) {
      visit(soyFile);
    }
  }

  /**
   * Generates one *FooTemplates.java file for the given Soy file and appends it to {@link
   * #generatedFiles}.
   */
  @Override
  protected void visitSoyFileNode(SoyFileNode soyFile) {
    FileInfo fileInfo = transformer.transform(soyFile);
    kytheHelper = new KytheHelper(soyFile.getFilePath());
    ilb = new IndentedLinesBuilder(kytheHelper);
    appendFileHeaderAndImports(fileInfo);

    String javaClassNameForSoyFile = fileInfo.className();

    // Start of *FooTemplates class.
    appendJavadoc(
        ilb,
        "Wrapper class containing {@link com.google.template.soy.data.SoyTemplate} builders for"
            + " each template in: "
            + fileInfo.soyFileName()
            + ".",
        /* forceMultiline= */ false,
        /* wrapAt100Chars= */ true);
    ilb.appendLine(
        "@javax.annotation.Generated(\n"
            + " value = \"com.google.template.soy.SoyParseInfoGenerator\""
            + (kytheMode.isEnabled()
                ? ",\n comments = \"kythe-inline-metadata:kythe-inline-metadata\""
                : "")
            + ")");
    ilb.appendLineStart("public final class ")
        .appendImputee(javaClassNameForSoyFile, getByteSpan(soyFile))
        .appendLineEnd(" {");
    ilb.increaseIndent();

    // Add FooParams subclasses for the templates in this file.
    generateParamsClassesForEachTemplate(fileInfo);

    // End of *FooTemplates class.
    ilb.decreaseIndent();
    ilb.appendLine("}");

    String kytheComment = getKytheComment(kytheMode, kytheHelper);

    // Add the file name and contents to the list of generated files to write.
    String fileName = javaClassNameForSoyFile + ".java";
    generatedFiles.add(GeneratedFile.create("", fileName, ilb + kytheComment));
    ilb = null;
  }

  /** Byte span of the file's namespace declaration, used for Kythe cross-references. */
  private static ByteSpan getByteSpan(SoyFileNode node) {
    return SoyTreeUtils.getByteSpan(node, node.getNamespaceDeclaration().getNamespaceLocation());
  }

  /** Byte span of the template's name, used for Kythe cross-references. */
  private static ByteSpan getByteSpan(TemplateInfo node) {
    return SoyTreeUtils.getByteSpan(node.template(), node.template().getTemplateNameLocation());
  }

  /**
   * Byte span of the declaration of {@code param} within {@code template}, or {@link
   * ByteSpan#UNKNOWN} when the param has no matching header declaration (e.g. indirect params).
   */
  private ByteSpan getByteSpan(TemplateInfo template, ParamInfo param) {
    TemplateNode t = template.template();
    Optional<TemplateHeaderVarDefn> match =
        t.getHeaderParams().stream().filter(p -> p.name().equals(param.name())).findFirst();
    return match.isPresent()
        ? SoyTreeUtils.getByteSpan(template.template(), match.get().nameLocation())
        : ByteSpan.UNKNOWN;
  }

  /** Renders the accumulated Kythe metadata as a trailing comment for the generated file. */
  static String getKytheComment(KytheMode kytheMode, KytheHelper kytheHelper) {
    StringBuilder sb = new StringBuilder();
    kytheHelper.appendGeneratedCodeInfo(kytheMode, sb);
    return sb.toString();
  }

  /** For each public, non-delegate template in the given soy file, generates a Foo inner class. */
  private void generateParamsClassesForEachTemplate(FileInfo soyFile) {
    soyFile
        .templates()
        .forEach(
            t -> {
              switch (t.status()) {
                case HANDLED:
                  visitTemplateInfo(t);
                  break;
                case NAME_COLLISION:
                  // Skip the template but surface a warning so the collision is actionable.
                  errorReporter.warn(
                      t.sourceLocation(), TEMPLATE_NAME_COLLISION, t.templateName(), t.className());
                  break;
              }
            });
  }

  /**
   * Writes a Foo subclass for the given template. The class extends {@link
   * com.google.template.soy.data.BaseSoyTemplateImpl}, which implements {@link
   * com.google.template.soy.data.SoyTemplate}.
   */
  private void visitTemplateInfo(TemplateInfo template) {
    String templateClass = template.className();

    // Start of Foo class.
    String templateDescription = template.soyDocDesc();
    ilb.appendLine();
    appendJavadoc(
        ilb,
        "Template params for "
            + template.templateNameForUserMsgs()
            + (templateDescription != null ? ": " + templateDescription : "."),
        /* forceMultiline= */ false,
        /* wrapAt100Chars= */ true);
    ilb.appendLineStart("public static final class ")
        .appendImputee(templateClass, getByteSpan(template))
        .appendLineEnd(" extends com.google.template.soy.data.BaseSoyTemplateImpl {");
    ilb.increaseIndent();
    ilb.appendLine();
    ilb.appendLine(
        "private static final java.lang.String "
            + TEMPLATE_NAME_FIELD
            + " = \""
            + template.templateName()
            + "\";");
    ilb.appendLine();

    appendFutureWrapperMethod(templateClass);

    // Constructor for Foo.
    ilb.appendLine("private " + templateClass + "(" + templateClass + ".Builder builder) {");
    ilb.increaseIndent();
    ilb.appendLine("super(builder);");
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();

    ilb.appendLine("@java.lang.Override");
    ilb.appendLine("public final java.lang.String getTemplateName() {");
    ilb.increaseIndent();
    ilb.appendLine("return " + TEMPLATE_NAME_FIELD + ";");
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();

    appendParamsBuilderClass(template, templateClass);

    // End of Foo class.
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();
  }

  /**
   * Adds a static method to each Params class: {@code public static SoyTemplate.AsyncWrapper<Foo>
   * wrapFuture(ListenableFuture<Foo>)}. This utility is needed for supporting Producers + some Apps
   * Framework utility classes.
   *
   * @see com.google.apps.framework.template.StructuredPageResponse
   */
  private void appendFutureWrapperMethod(String paramsClass) {
    appendJavadoc(
        ilb,
        "Wraps a ListenableFuture<"
            + paramsClass
            + "> as a SoyTemplate.AsyncWrapper<"
            + paramsClass
            + ">",
        false,
        true);
    ilb.appendLine(
        "public static com.google.template.soy.data.SoyTemplate.AsyncWrapper<"
            + paramsClass
            + "> wrapFuture(com.google.common.util.concurrent.ListenableFuture<"
            + paramsClass
            + "> paramsFuture) {");
    ilb.increaseIndent();
    ilb.appendLine(
        "return new com.google.template.soy.data.SoyTemplate.AsyncWrapper<>("
            + TEMPLATE_NAME_FIELD
            + ", paramsFuture);");
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();
  }

  /**
   * Appends a builder class for template "foo" with parameter setting methods. This class extends
   * the {@link com.google.template.soy.data.BaseSoyTemplateImpl.AbstractBuilder} class.
   */
  private void appendParamsBuilderClass(TemplateInfo template, String templateParamsClassname) {
    appendJavadoc(ilb, "Creates a new Builder instance.", false, true);
    ilb.appendLineStart("public static Builder ")
        .appendImputee("builder", getByteSpan(template))
        .appendLineEnd("() {");
    ilb.increaseIndent();
    ilb.appendLine("return new Builder();");
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();

    // Will contain handled and unhandled params. We include some types of unhandled params so that
    // they still end up in the generated list of params.
    List<ParamInfo> combinedParams =
        template.params().stream()
            .filter(
                info -> {
                  switch (info.status()) {
                    case HANDLED:
                    case UNHANDLED_TYPE:
                      return true;
                    case NAME_COLLISION:
                      // Kept in the params list but no setter is generated; warn so the author
                      // can rename.
                      errorReporter.warn(
                          info.sourceLocation(),
                          PARAM_NAME_COLLISION,
                          info.name(),
                          template.templateName(),
                          info.setterName());
                      return true;
                    case JAVA_INCOMPATIBLE:
                      break;
                    case INDIRECT_INCOMPATIBLE_TYPES:
                      errorReporter.warn(
                          info.sourceLocation(),
                          TYPE_COLLISION,
                          info.name(),
                          template.templateName());
                      break;
                    case INDIRECT_PROTO:
                      errorReporter.warn(
                          info.sourceLocation(),
                          INDIRECT_PROTO,
                          info.name(),
                          template.templateName());
                      break;
                  }
                  return false;
                })
            .collect(toList());
    appendParamConstants(ilb, template, combinedParams);

    List<ParamInfo> nonInjectedParams =
        combinedParams.stream().filter(p -> !p.injected()).collect(toList());

    if (nonInjectedParams.stream().noneMatch(ParamInfo::requiredAndNotIndirect)) {
      // Invoke the constructor directly. For these templates it could allow callers to avoid
      // loading the builder completely.
      ilb.appendLine(
          "private static final "
              + templateParamsClassname
              + " "
              + DEFAULT_INSTANCE_FIELD
              + " = new "
              + templateParamsClassname
              + "(builder());");
      ilb.appendLine();

      appendJavadoc(
          ilb,
          "Creates a new instance of "
              + templateParamsClassname
              + " with no parameters set. This method was generated because all template"
              + " parameters are optional.",
          false,
          true);
      ilb.appendLineStart("public static ", templateParamsClassname, " ")
          .appendImputee("getDefaultInstance", getByteSpan(template))
          .appendLineEnd("() {");
      ilb.increaseIndent();
      ilb.appendLine("return " + DEFAULT_INSTANCE_FIELD + ";");
      ilb.decreaseIndent();
      ilb.appendLine("}");
      ilb.appendLine();
    }

    // Accumulator params (list-of-record params with adder methods) require the builder variant
    // that buffers values until build().
    boolean anyAccumulatorParameters =
        nonInjectedParams.stream()
            .flatMap(param -> param.javaTypes().stream())
            .anyMatch(
                javaType ->
                    javaType instanceof RecordJavaType && ((RecordJavaType) javaType).isList());

    // Start of Foo.Builder class.
    ilb.appendLineStart("public static final class ")
        .appendImputee("Builder", getByteSpan(template))
        .appendLineEnd(
            " extends com.google.template.soy.data.BaseSoyTemplateImpl.",
            (anyAccumulatorParameters
                ? "AbstractBuilderWithAccumulatorParameters"
                : "AbstractBuilder"),
            "<Builder, ",
            templateParamsClassname,
            "> {");
    ilb.appendLine();
    ilb.increaseIndent();

    // Constructor for Foo.Builder.
    ilb.appendLine("private Builder() {");
    ilb.increaseIndent();
    ilb.appendLine(
        "super(",
        nonInjectedParams.size(),
        ", "
            + nonInjectedParams.stream().filter(pi -> pi.required() && !pi.indirect()).count()
            + ");");
    appendRecordListInitializations(ilb, nonInjectedParams);
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();

    // #allParams() for FooTemplate.Builder.
    ilb.appendLine("@java.lang.Override");
    ilb.appendLine(
        "protected"
            + " com.google.common.collect.ImmutableSet<com.google.template.soy.data.SoyTemplateParam<?>>"
            + " allParams() {");
    ilb.increaseIndent();
    ilb.appendLine("return " + PARAMS_FIELD + ";");
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();

    // #build() for FooTemplate.Builder.
    ilb.appendLine("@java.lang.Override");
    ilb.appendLine("public " + templateParamsClassname + " build() {");
    ilb.increaseIndent();
    // Flush any accumulator parameters.
    if (anyAccumulatorParameters) {
      ilb.appendLine("prepareDataForBuild();");
    }
    ilb.appendLine("return new " + templateParamsClassname + "(this);");
    ilb.decreaseIndent();
    ilb.appendLine("}");
    ilb.appendLine();

    // #buildPartial() for FooTemplate.Builder.
    ilb.appendLine("@java.lang.Override");
    ilb.appendLine("public com.google.template.soy.data.PartialSoyTemplate buildPartial() {");
    ilb.increaseIndent();
    if (anyAccumulatorParameters) {
      ilb.appendLine("prepareDataForBuild();");
    }
    ilb.appendLine("return doBuildPartial(" + TEMPLATE_NAME_FIELD + ");");
    ilb.decreaseIndent();
    ilb.appendLine("}");

    // Add setters for each direct template param.
    nonInjectedParams.stream()
        .filter(p -> p.status() == ParamStatus.HANDLED)
        .forEach(p -> writeSettersForParam(p, template));

    ilb.appendLine();

    // End of FooTemplateInvocation.Builder class.
    ilb.decreaseIndent();
    ilb.appendLine("}");
  }

  /**
   * Emits one {@code SoyTemplateParam} constant per param, followed by the {@code __PARAMS__} set
   * of non-injected params. Constant field names are de-duplicated via {@link
   * ParamInfo#updateConstantFieldName}.
   */
  private void appendParamConstants(
      IndentedLinesBuilder ilb, TemplateInfo template, List<ParamInfo> params) {
    Set<String> usedNames = new HashSet<>();
    List<String> nonInjected = new ArrayList<>();
    for (ParamInfo param : params) {
      while (usedNames.contains(param.constantFieldName())) {
        param.updateConstantFieldName();
      }
      String fieldName = param.constantFieldName();
      usedNames.add(fieldName);
      if (!param.injected()) {
        nonInjected.add(fieldName);
      }

      String genericType = "?";
      List<JavaType> types = param.javaTypes();
      if (types.size() == 1) {
        JavaType javaType = types.get(0);
        // this is basically 'instanceof RecordJavaType' at this point
        if (javaType.isTypeLiteralSupported()) {
          genericType = javaType.asTypeLiteralString();
        }
      }

      // Make any param that supports type literal public so it can be used with
      // TemplateParamModule, SoyTemplateData, AbstractBuilder, and tests. Union types, records, and
      // CSS params will be private since they can't be represented as a single specific type
      // literal.
      String visibility = !"?".equals(genericType) ? "public" : "private";

      // These values correspond to static factory methods on SoyTemplateParam.
      CodeGenUtils.Member factory = STANDARD_P;
      if (param.injected()) {
        factory = INJECTED_P;
      } else if (param.indirect()) {
        factory = INDIRECT_P;
      }

      String paramDescription = param.param().getDescription();
      if (paramDescription == null) {
        paramDescription = "";
      } else {
        paramDescription += " ";
      }

      // Simple (dotted-identifier) types use TypeToken.of(X.class); generic types need an
      // anonymous TypeToken subclass to capture the parameterization.
      String typeToken =
          "?".equals(genericType)
              // TODO(jcg): this should probably be a wildcard type
              ? "com.google.common.reflect.TypeToken.of(java.lang.Object.class)"
              : (genericType.matches("(\\.|\\w)+")
                  ? "com.google.common.reflect.TypeToken.of(" + genericType + ".class" + ")"
                  : "new com.google.common.reflect.TypeToken<" + genericType + ">() {}");
      ilb.appendLine(
          String.format(
              "/** {@%s %s} %s*/",
              param.injected() ? "inject" : "param", param.name(), paramDescription));
      ilb.appendLine(
          String.format(
              "%s static final com.google.template.soy.data.SoyTemplateParam<%s>",
              visibility, genericType));
      ilb.increaseIndent(2);
      ilb.appendLineStart()
          .appendImputee(fieldName, getByteSpan(template, param))
          .appendLineEnd(" =");
      ilb.increaseIndent(2);
      ilb.appendLine(factory, "(");
      ilb.increaseIndent(2);
      ilb.appendLine("\"", param.name(), "\",");
      ilb.appendLine("/* required= */ ", param.required(), ",");
      ilb.appendLine(typeToken, ");");
      ilb.decreaseIndent(6);
      ilb.appendLine();
    }

    ilb.appendLineStart(
        "private static final"
            + " com.google.common.collect.ImmutableSet<com.google.template.soy.data.SoyTemplateParam<?>>"
            + " "
            + PARAMS_FIELD
            + " = ");
    // Omit injected params from the list of params passed to the builder.
    appendFunctionCallWithParamsOnNewLines(
        ilb, "com.google.common.collect.ImmutableSet.of", nonInjected);
    ilb.appendLineEnd(";");
    ilb.appendLine();
  }

  private static void appendRecordListInitializations(
      IndentedLinesBuilder ilb, List<ParamInfo> params) {
    // For every required param that's of type list<[...]> (list of records), initialize the list
    // so that upon building the template we do not throw an error for zero records.
    for (ParamInfo param : params) {
      if (param.required()) {
        List<JavaType> types = param.javaTypes();
        if (types.size() == 1
            && types.get(0) instanceof RecordJavaType
            && ((RecordJavaType) types.get(0)).isList()) {
          ilb.appendLine(String.format("%s(%s);", INIT_LIST_PARAM, param.constantFieldName()));
        }
      }
    }
  }

  /** Appends the file header and imports for the generated *FooTemplates.java */
  private void appendFileHeaderAndImports(FileInfo soyFile) {
    // Header.
    ilb.appendLine("// This file was automatically generated by the Soy compiler.");
    ilb.appendLine("// Please don't edit this file by hand.");
    ilb.appendLine("// source: " + soyFile.soyFilePath().path()); // For Code Search link.
    ilb.appendLine();
    ilb.appendLine("package " + soyFile.packageName() + ";");
    ilb.appendLine();
    ilb.appendLine();

    // No Imports!
    // It is annoying and verbose but by fully qualifying all type names we can avoid conflicts
    // with user defined symbols
  }

  /**
   * Writes setter methods each of the java types that this param can be (e.g union int | string
   * would generate setFoo(int) and setFoo(string)).
   *
   * <p>TODO(b/77550695): Update docs for how we handle futures.
   */
  private void writeSettersForParam(ParamInfo param, TemplateInfo template) {
    // Add setters for this param.
    Set<String> signatures = new HashSet<>();
    for (JavaType javaType : param.javaTypes()) {
      // Record types flatten to their member types for signature-collision detection, since the
      // generated record setter takes one Java parameter per record member.
      String signature =
          javaType instanceof RecordJavaType
              ? ((RecordJavaType) javaType)
                  .getJavaTypeMap().values().stream()
                      .map(JavaType::toJavaTypeString)
                      .collect(joining(","))
              : javaType.toJavaTypeString();
      // Collisions are possible in unions containing records with a single property or unions with
      // records with the same types.
      if (!signatures.add(signature)) {
        errorReporter.warn(
            param.sourceLocation(),
            SETTER_SIGNATURE_COLLISION,
            param.name(),
            template.templateName(),
            String.format("%s(%s)", param.setterName(), signature));
      } else {
        writeSetter(param, template, javaType);
      }
    }

    // For now only write the future interface if the setter is not already overloaded
    switch (param.futureStatus()) {
      case HANDLED:
        for (JavaType futureType : param.futureTypes()) {
          writeFutureSetter(ilb, param, template, new FutureJavaType(futureType));
          if (futureType instanceof CollectionJavaType) {
            // Also emit a collection-of-futures variant, e.g. List<Future<T>>.
            CollectionJavaType collectionType = (CollectionJavaType) futureType;
            writeCollectionFutureSetter(
                ilb,
                param,
                template,
                new CollectionJavaType(
                    collectionType.getSubtype(),
                    new FutureJavaType(collectionType.getElementType()),
                    collectionType.isNullable()));
          }
        }
        break;
      case NAME_COLLISION:
        errorReporter.warn(param.sourceLocation(), FUTURE_NAME_COLLISION, param.name());
        break;
      case UNHANDLED:
        break;
    }
  }

  /** Writes a setter method for the given param and java type. */
  private void writeSetter(ParamInfo param, TemplateInfo template, JavaType javaType) {
    String paramDescription = param.param().getDescription();
    ilb.appendLine();
    appendJavadoc(
        ilb,
        "Sets "
            + param.name()
            + (Strings.isNullOrEmpty(paramDescription) ? "." : ": " + paramDescription),
        /* forceMultiline= */ false,
        /* wrapAt100Chars= */ true);

    if (javaType instanceof RecordJavaType) {
      writeRecordSetter(param, template, (RecordJavaType) javaType);
    } else {
      String javaTypeString = javaType.toJavaTypeString();
      boolean nullable = javaType.isNullable();

      ilb.appendLine("@com.google.errorprone.annotations.CanIgnoreReturnValue");
      ilb.appendLineStart("public Builder ")
          .appendImputee(param.setterName(), getByteSpan(template, param))
          .appendLineEnd("(", maybeAddNullableToClass(nullable, javaTypeString), " value) {");
      ilb.increaseIndent();
      String newVariableName = javaType.asInlineCast("value");
      ilb.appendLine(
          "return " + SET_PARAM_INTERNAL + "(",
          param.constantFieldName(),
          ", ",
          newVariableName,
          ");");
      ilb.decreaseIndent();
      ilb.appendLine("}");
    }
  }

  /**
   * Writes a setter (or, for list-of-record params, an adder) for a record-typed param; the method
   * takes one Java parameter per record member and packs them with {@code AS_RECORD}.
   */
  private void writeRecordSetter(ParamInfo param, TemplateInfo template, RecordJavaType type) {
    ilb.appendLine("@com.google.errorprone.annotations.CanIgnoreReturnValue");
    ilb.appendLineStart("public Builder ")
        .appendImputee(
            type.isList() ? param.adderName() : param.setterName(), getByteSpan(template, param))
        .appendLineMiddle("(");

    List<String> paramNames = type.getJavaTypeMap().keySet().asList();
    List<String> javaParamNames = new ArrayList<>();

    // Emit the parameter list: one Java parameter per record member.
    boolean first = true;
    for (Map.Entry<String, JavaType> entry : type.getJavaTypeMap().entrySet()) {
      String paramName = makeParamName(entry.getKey());
      javaParamNames.add(paramName);
      if (!first) {
        ilb.appendLineMiddle(", ");
      }
      JavaType paramType = entry.getValue();
      ilb.appendLineMiddle(
          maybeAddNullableToClass(paramType.isNullable(), paramType.toJavaTypeString()),
          " ",
          paramName);
      first = false;
    }
    ilb.appendLineEnd(") {");
    ilb.increaseIndent();

    CodeGenUtils.Member delegate = type.isList() ? ADD_TO_LIST_PARAM : SET_PARAM_INTERNAL;

    ilb.appendLineStart(
        "return ", delegate, "(", param.constantFieldName(), ", " + AS_RECORD + "(");
    int numParams = paramNames.size();
    for (int i = 0; i < numParams; i++) {
      if (i != 0) {
        ilb.appendLineMiddle(", ");
      }
      ilb.appendLineMiddle(
          "\"",
          paramNames.get(i),
          "\", ",
          type.getJavaTypeMap().get(paramNames.get(i)).asInlineCast(javaParamNames.get(i)));
    }
    ilb.appendLineEnd("));");
    ilb.decreaseIndent();
    ilb.appendLine("}");
  }

  /** Writes a setter method for the given param and java type. */
  private void writeFutureSetter(
      IndentedLinesBuilder ilb, ParamInfo param, TemplateInfo template, FutureJavaType javaType) {
    ilb.appendLine();
    appendJavadoc(
        ilb,
        "Future compatible version of {@link #"
            + param.setterName()
            + "("
            + stripGenerics(javaType.getType().toJavaTypeString())
            + ")}.",
        /* forceMultiline= */ false,
        /* wrapAt100Chars= */ true);
    ilb.appendLine("@com.google.errorprone.annotations.CanIgnoreReturnValue");
    ilb.appendLineStart("public Builder ")
        .appendImputee(param.futureSetterName(), getByteSpan(template, param))
        .appendLineEnd("(", javaType.toJavaTypeString(), " future) {");
    ilb.increaseIndent();
    ilb.appendLine(
        "return "
            + SET_PARAM_INTERNAL
            + "("
            + param.constantFieldName()
            + ", "
            + javaType.asInlineCast("future")
            + ");");
    ilb.decreaseIndent();
    ilb.appendLine("}");
  }

  /** Writes a setter method for the given param and java type. */
  private void writeCollectionFutureSetter(
      IndentedLinesBuilder ilb, ParamInfo param, TemplateInfo template, CollectionJavaType javaType) {
    ilb.appendLine();
    appendJavadoc(
        ilb,
        "Future compatible collection version of {@link #"
            + param.setterName()
            + "("
            + stripGenerics(javaType.toJavaTypeString())
            + ")}.",
        /* forceMultiline= */ false,
        /* wrapAt100Chars= */ true);
    ilb.appendLine("@com.google.errorprone.annotations.CanIgnoreReturnValue");
    String variableName = makeLowerCamelCase(javaType.getSubtype().name());
    ilb.appendLineStart("public Builder ")
        .appendImputee(
            param.futureSetterName() + makeUpperCamelCase(javaType.getSubtype().name()),
            getByteSpan(template, param))
        .appendLineEnd("(", javaType.toJavaTypeString(), " " + variableName + ") {");
    ilb.increaseIndent();
    ilb.appendLine(
        "return "
            + SET_PARAM_INTERNAL
            + "("
            + param.constantFieldName()
            + ", "
            + javaType.asInlineCast(variableName)
            + ");");
    ilb.decreaseIndent();
    ilb.appendLine("}");
  }

  /**
   * Removes all {@code <...>} generic segments from a Java type string. Repeats the replacement
   * until a fixed point so nested generics (e.g. {@code List<List<String>>}) are fully stripped.
   */
  private static String stripGenerics(String type) {
    String newType = type;
    do {
      type = newType;
      newType = type.replaceAll("<[^>]*>*", "");
    } while (!newType.equals(type));
    return newType;
  }

  /** Logs a warning if two soy files mapped to the same generated java file name. */
  private void logWarningIfFilenamesNotUnique(ImmutableList<GeneratedFile> files) {
    ImmutableList<String> duplicateFilenames =
        files.stream().collect(groupingBy(GeneratedFile::fileName, counting())).entrySet().stream()
            .filter(e -> e.getValue() > 1) // We only care about duplicate filenames.
            .map(Map.Entry::getKey)
            .collect(toImmutableList());

    for (String fileName : duplicateFilenames) {
      errorReporter.warn(UNKNOWN, FILE_NAME_COLLISION, fileName);
    }
  }

  // Converts a Soy record key to a legal Java parameter name, suffixing reserved keywords with '_'.
  private static String makeParamName(String s) {
    s = makeLowerCamelCase(s);
    return isReservedKeyword(s) ? s + "_" : s;
  }
}
googleapis/google-cloud-java
35,104
java-network-management/proto-google-cloud-network-management-v1/src/main/java/com/google/cloud/networkmanagement/v1/UpdateConnectivityTestRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/networkmanagement/v1/reachability.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.networkmanagement.v1; /** * * * <pre> * Request for the `UpdateConnectivityTest` method. * </pre> * * Protobuf type {@code google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest} */ public final class UpdateConnectivityTestRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest) UpdateConnectivityTestRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateConnectivityTestRequest.newBuilder() to construct. 
private UpdateConnectivityTestRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateConnectivityTestRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateConnectivityTestRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkmanagement.v1.ReachabilityServiceProto .internal_static_google_cloud_networkmanagement_v1_UpdateConnectivityTestRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkmanagement.v1.ReachabilityServiceProto .internal_static_google_cloud_networkmanagement_v1_UpdateConnectivityTestRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest.class, com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int RESOURCE_FIELD_NUMBER = 2; private com.google.cloud.networkmanagement.v1.ConnectivityTest resource_; /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the resource field is set. */ @java.lang.Override public boolean hasResource() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The resource. */ @java.lang.Override public com.google.cloud.networkmanagement.v1.ConnectivityTest getResource() { return resource_ == null ? com.google.cloud.networkmanagement.v1.ConnectivityTest.getDefaultInstance() : resource_; } /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.networkmanagement.v1.ConnectivityTestOrBuilder getResourceOrBuilder() { return resource_ == null ? 
com.google.cloud.networkmanagement.v1.ConnectivityTest.getDefaultInstance() : resource_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getResource()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getResource()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest)) { return super.equals(obj); } com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest other = (com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasResource() != other.hasResource()) return false; if (hasResource()) { if (!getResource().equals(other.getResource())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; 
hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasResource()) { hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( java.io.InputStream 
input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for the `UpdateConnectivityTest` method. * </pre> * * Protobuf type {@code google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest) com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkmanagement.v1.ReachabilityServiceProto .internal_static_google_cloud_networkmanagement_v1_UpdateConnectivityTestRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkmanagement.v1.ReachabilityServiceProto .internal_static_google_cloud_networkmanagement_v1_UpdateConnectivityTestRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest.class, com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest.Builder.class); } // Construct using // com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getResourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; 
updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.networkmanagement.v1.ReachabilityServiceProto .internal_static_google_cloud_networkmanagement_v1_UpdateConnectivityTestRequest_descriptor; } @java.lang.Override public com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest getDefaultInstanceForType() { return com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest build() { com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest buildPartial() { com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest result = new com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.resource_ = resourceBuilder_ == null ? 
resource_ : resourceBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest) { return mergeFrom( (com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest other) { if (other == com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest .getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasResource()) { mergeResource(other.getResource()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Mask of fields to update. 
At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Mask of fields to update. At least one path must be supplied in * this field. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.networkmanagement.v1.ConnectivityTest resource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networkmanagement.v1.ConnectivityTest, com.google.cloud.networkmanagement.v1.ConnectivityTest.Builder, com.google.cloud.networkmanagement.v1.ConnectivityTestOrBuilder> resourceBuilder_; /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the resource field is set. */ public boolean hasResource() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The resource. */ public com.google.cloud.networkmanagement.v1.ConnectivityTest getResource() { if (resourceBuilder_ == null) { return resource_ == null ? com.google.cloud.networkmanagement.v1.ConnectivityTest.getDefaultInstance() : resource_; } else { return resourceBuilder_.getMessage(); } } /** * * * <pre> * Required. Only fields specified in update_mask are updated. 
* </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setResource(com.google.cloud.networkmanagement.v1.ConnectivityTest value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resource_ = value; } else { resourceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setResource( com.google.cloud.networkmanagement.v1.ConnectivityTest.Builder builderForValue) { if (resourceBuilder_ == null) { resource_ = builderForValue.build(); } else { resourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeResource(com.google.cloud.networkmanagement.v1.ConnectivityTest value) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && resource_ != null && resource_ != com.google.cloud.networkmanagement.v1.ConnectivityTest.getDefaultInstance()) { getResourceBuilder().mergeFrom(value); } else { resource_ = value; } } else { resourceBuilder_.mergeFrom(value); } if (resource_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Only fields specified in update_mask are updated. 
* </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearResource() { bitField0_ = (bitField0_ & ~0x00000002); resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.networkmanagement.v1.ConnectivityTest.Builder getResourceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getResourceFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Only fields specified in update_mask are updated. * </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.networkmanagement.v1.ConnectivityTestOrBuilder getResourceOrBuilder() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilder(); } else { return resource_ == null ? com.google.cloud.networkmanagement.v1.ConnectivityTest.getDefaultInstance() : resource_; } } /** * * * <pre> * Required. Only fields specified in update_mask are updated. 
* </pre> * * <code> * .google.cloud.networkmanagement.v1.ConnectivityTest resource = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networkmanagement.v1.ConnectivityTest, com.google.cloud.networkmanagement.v1.ConnectivityTest.Builder, com.google.cloud.networkmanagement.v1.ConnectivityTestOrBuilder> getResourceFieldBuilder() { if (resourceBuilder_ == null) { resourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networkmanagement.v1.ConnectivityTest, com.google.cloud.networkmanagement.v1.ConnectivityTest.Builder, com.google.cloud.networkmanagement.v1.ConnectivityTestOrBuilder>( getResource(), getParentForChildren(), isClean()); resource_ = null; } return resourceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest) } // @@protoc_insertion_point(class_scope:google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest) private static final com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest(); } public static com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateConnectivityTestRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateConnectivityTestRequest>() { @java.lang.Override public UpdateConnectivityTestRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateConnectivityTestRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateConnectivityTestRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/systemds
33,338
src/main/java/org/apache/sysds/runtime/compress/colgroup/dictionary/DictionaryFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.runtime.compress.colgroup.dictionary; import java.io.DataInput; import java.io.IOException; import java.util.List; import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.sysds.runtime.compress.DMLCompressionException; import org.apache.sysds.runtime.compress.bitmap.ABitmap; import org.apache.sysds.runtime.compress.bitmap.Bitmap; import org.apache.sysds.runtime.compress.bitmap.MultiColBitmap; import org.apache.sysds.runtime.compress.colgroup.AColGroup.CompressionType; import org.apache.sysds.runtime.compress.colgroup.AColGroupCompressed; import org.apache.sysds.runtime.compress.colgroup.ColGroupEmpty; import org.apache.sysds.runtime.compress.colgroup.IContainADictionary; import org.apache.sysds.runtime.compress.colgroup.IContainDefaultTuple; import org.apache.sysds.runtime.compress.colgroup.indexes.IColIndex; import org.apache.sysds.runtime.compress.lib.CLALibCombineGroups; import org.apache.sysds.runtime.compress.utils.ACount; import org.apache.sysds.runtime.compress.utils.DblArray; import org.apache.sysds.runtime.compress.utils.DblArrayCountHashMap; import 
org.apache.sysds.runtime.compress.utils.DoubleCountHashMap; import org.apache.sysds.runtime.compress.utils.HashMapLongInt; import org.apache.sysds.runtime.compress.utils.HashMapLongInt.KV; import org.apache.sysds.runtime.data.SparseBlock; import org.apache.sysds.runtime.data.SparseRowVector; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.matrix.data.Pair; public interface DictionaryFactory { static final Log LOG = LogFactory.getLog(DictionaryFactory.class.getName()); public enum Type { FP64_DICT, MATRIX_BLOCK_DICT, INT8_DICT, IDENTITY, IDENTITY_SLICE, PLACE_HOLDER } public static IDictionary read(DataInput in) throws IOException { final Type type = Type.values()[in.readByte()]; switch(type) { case FP64_DICT: return Dictionary.read(in); case INT8_DICT: return QDictionary.read(in); case PLACE_HOLDER: return PlaceHolderDict.read(in); case IDENTITY: return IdentityDictionary.read(in); case IDENTITY_SLICE: return IdentityDictionarySlice.read(in); case MATRIX_BLOCK_DICT: default: return MatrixBlockDictionary.read(in); } } public static long getInMemorySize(int nrValues, int nrColumns, double tupleSparsity, boolean lossy) { if(lossy) return QDictionary.getInMemorySize(nrValues * nrColumns); else if(nrColumns > 1 && tupleSparsity < 0.4) return MatrixBlockDictionary.getInMemorySize(nrValues, nrColumns, tupleSparsity); else return Dictionary.getInMemorySize(nrValues * nrColumns); } public static IDictionary create(DblArrayCountHashMap map, int nCols, boolean addZeroTuple, double sparsity) { final ACount<DblArray>[] vals = map.extractValues(); final int nVals = vals.length; final int nTuplesOut = nVals + (addZeroTuple ? 
1 : 0); if(sparsity < 0.4) { final MatrixBlock retB = new MatrixBlock(nTuplesOut, nCols, true); retB.allocateSparseRowsBlock(); final SparseBlock sb = retB.getSparseBlock(); for(int i = 0; i < nVals; i++) { final ACount<DblArray> dac = vals[i]; final double[] dv = dac.key().getData(); for(int k = 0; k < dv.length; k++) sb.append(dac.id, k, dv[k]); } retB.recomputeNonZeros(); retB.examSparsity(true); return MatrixBlockDictionary.create(retB); } else { final double[] resValues = new double[(nTuplesOut) * nCols]; for(int i = 0; i < nVals; i++) { final ACount<DblArray> dac = vals[i]; System.arraycopy(dac.key().getData(), 0, resValues, dac.id * nCols, nCols); } return Dictionary.create(resValues); } } public static IDictionary create(ABitmap ubm) { return create(ubm, 1.0); } public static IDictionary create(ABitmap ubm, double sparsity) { final int nCol = ubm.getNumColumns(); if(ubm instanceof Bitmap) return Dictionary.create(((Bitmap) ubm).getValues()); else if(sparsity < 0.4 && nCol > 4) { // && ubm instanceof MultiColBitmap final MultiColBitmap mcbm = (MultiColBitmap) ubm; final MatrixBlock m = new MatrixBlock(ubm.getNumValues(), nCol, true); m.allocateSparseRowsBlock(); final SparseBlock sb = m.getSparseBlock(); final int nVals = ubm.getNumValues(); for(int i = 0; i < nVals; i++) { final double[] tuple = mcbm.getValues(i); for(int col = 0; col < nCol; col++) sb.append(i, col, tuple[col]); } m.recomputeNonZeros(); m.examSparsity(true); return MatrixBlockDictionary.create(m); } else {// if(ubm instanceof MultiColBitmap) { MultiColBitmap mcbm = (MultiColBitmap) ubm; final int nVals = ubm.getNumValues(); double[] resValues = new double[nVals * nCol]; for(int i = 0; i < nVals; i++) System.arraycopy(mcbm.getValues(i), 0, resValues, i * nCol, nCol); return Dictionary.create(resValues); } } public static IDictionary create(ABitmap ubm, int defaultIndex, double[] defaultTuple, double sparsity, boolean addZero) { final int nCol = ubm.getNumColumns(); final int nVal = 
ubm.getNumValues() - (addZero ? 0 : 1); if(nCol > 4 && sparsity < 0.4) { final MultiColBitmap mcbm = (MultiColBitmap) ubm; // always multi column final MatrixBlock m = new MatrixBlock(nVal, nCol, true); m.allocateSparseRowsBlock(); final SparseBlock sb = m.getSparseBlock(); for(int i = 0; i < defaultIndex; i++) sb.set(i, new SparseRowVector(mcbm.getValues(i)), false); // copy default System.arraycopy(mcbm.getValues(defaultIndex), 0, defaultTuple, 0, nCol); for(int i = defaultIndex; i < ubm.getNumValues() - 1; i++) sb.set(i, new SparseRowVector(mcbm.getValues(i + 1)), false); m.recomputeNonZeros(); m.examSparsity(true); return MatrixBlockDictionary.create(m); } else { double[] dict = new double[nCol * nVal]; if(ubm instanceof Bitmap) { final double[] bmv = ((Bitmap) ubm).getValues(); System.arraycopy(bmv, 0, dict, 0, defaultIndex); defaultTuple[0] = bmv[defaultIndex]; System.arraycopy(bmv, defaultIndex + 1, dict, defaultIndex, bmv.length - defaultIndex - 1); } else { // if(ubm instanceof MultiColBitmap) { final MultiColBitmap mcbm = (MultiColBitmap) ubm; for(int i = 0; i < defaultIndex; i++) System.arraycopy(mcbm.getValues(i), 0, dict, i * nCol, nCol); System.arraycopy(mcbm.getValues(defaultIndex), 0, defaultTuple, 0, nCol); for(int i = defaultIndex; i < ubm.getNumValues() - 1; i++) System.arraycopy(mcbm.getValues(i + 1), 0, dict, i * nCol, nCol); } return Dictionary.create(dict); } } // public static IDictionary createWithAppendedZeroTuple(ABitmap ubm, double sparsity) { // final int nVals = ubm.getNumValues(); // final int nRows = nVals + 1; // final int nCols = ubm.getNumColumns(); // if(ubm instanceof Bitmap) { // final double[] resValues = new double[nRows]; // final double[] from = ((Bitmap) ubm).getValues(); // System.arraycopy(from, 0, resValues, 0, from.length); // return Dictionary.create(resValues); // } // final MultiColBitmap mcbm = (MultiColBitmap) ubm; // if(sparsity < 0.4 && nCols > 4) { // final MatrixBlock m = new MatrixBlock(nRows, nCols, true); 
// m.allocateSparseRowsBlock(); // final SparseBlock sb = m.getSparseBlock(); // for(int i = 0; i < nVals; i++) { // final double[] tuple = mcbm.getValues(i); // for(int col = 0; col < nCols; col++) // sb.append(i, col, tuple[col]); // } // m.recomputeNonZeros(); // m.examSparsity(true); // return MatrixBlockDictionary.create(m); // } // final double[] resValues = new double[nRows * nCols]; // for(int i = 0; i < nVals; i++) // System.arraycopy(mcbm.getValues(i), 0, resValues, i * nCols, nCols); // return Dictionary.create(resValues); // } public static IDictionary create(DoubleCountHashMap map) { final double[] resValues = map.getDictionary(); return Dictionary.create(resValues); } public static IDictionary combineDictionaries(AColGroupCompressed a, AColGroupCompressed b) { return combineDictionaries(a, b, null); } public static IDictionary combineDictionaries(AColGroupCompressed a, AColGroupCompressed b, HashMapLongInt filter) { if(a instanceof ColGroupEmpty && b instanceof ColGroupEmpty) return null; // null return is handled elsewhere. 
CompressionType ac = a.getCompType(); CompressionType bc = b.getCompType(); boolean ae = a instanceof IContainADictionary; boolean be = b instanceof IContainADictionary; if(ae && be) { final IDictionary ad = ((IContainADictionary) a).getDictionary(); final IDictionary bd = ((IContainADictionary) b).getDictionary(); if(ac.isConst()) { if(bc.isConst()) { return Dictionary.create(CLALibCombineGroups.constructDefaultTuple(a, b)); } else if(bc.isDense()) { final double[] at = ((IContainDefaultTuple) a).getDefaultTuple(); Pair<int[], int[]> r = IColIndex.reorderingIndexes(a.getColIndices(), b.getColIndices()); return combineConstLeft(at, bd, b.getNumCols(), r.getKey(), r.getValue(), filter); } } else if(ac.isDense()) { if(bc.isConst()) { final Pair<int[], int[]> r = IColIndex.reorderingIndexes(a.getColIndices(), b.getColIndices()); final double[] bt = ((IContainDefaultTuple) b).getDefaultTuple(); return combineSparseConstSparseRet(ad, a.getNumCols(), bt, r.getKey(), r.getValue(), filter); } else if(bc.isDense()) { return combineFullDictionaries(ad, a.getColIndices(), bd, b.getColIndices(), filter); } else if(bc.isSDC()) { double[] tuple = ((IContainDefaultTuple) b).getDefaultTuple(); return combineSDCRight(ad, a.getColIndices(), bd, tuple, b.getColIndices(), filter); } } else if(ac.isSDC()) { if(bc.isSDC()) { final double[] at = ((IContainDefaultTuple) a).getDefaultTuple(); final double[] bt = ((IContainDefaultTuple) b).getDefaultTuple(); return combineSDCFilter(ad, at, a.getColIndices(), bd, bt, b.getColIndices(), filter); } } } throw new NotImplementedException("Not supporting combining: " + a + " " + b); } /** * Combine the dictionaries assuming a sparse combination where each dictionary can be a SDC containing a default * element that have to be introduced into the combined dictionary. * * @param a A Dictionary can be SDC or const * @param b A Dictionary can be Const or SDC. 
* @return The combined dictionary */ public static IDictionary combineDictionariesSparse(AColGroupCompressed a, AColGroupCompressed b) { return combineDictionariesSparse(a, b, null); } /** * Combine the dictionaries assuming a sparse combination where each dictionary can be a SDC containing a default * element that have to be introduced into the combined dictionary. * * @param a A Dictionary can be SDC or const * @param b A Dictionary can be Const or SDC * @param filter A filter to remove elements in the combined dictionary * @return The combined dictionary */ public static IDictionary combineDictionariesSparse(AColGroupCompressed a, AColGroupCompressed b, HashMapLongInt filter) { CompressionType ac = a.getCompType(); CompressionType bc = b.getCompType(); if(filter != null) throw new NotImplementedException("Not supported filter for sparse join yet!"); if(ac.isSDC()) { final IDictionary ad = ((IContainADictionary) a).getDictionary(); if(bc.isConst()) { final Pair<int[], int[]> r = IColIndex.reorderingIndexes(a.getColIndices(), b.getColIndices()); double[] bt = ((IContainDefaultTuple) b).getDefaultTuple(); return combineSparseConstSparseRet(ad, a.getNumCols(), bt, r.getKey(), r.getValue()); } else if(bc.isSDC()) { final IDictionary bd = ((IContainADictionary) b).getDictionary(); if(a.sameIndexStructure(b)) { // in order or other order.. 
if(IColIndex.inOrder(a.getColIndices(), b.getColIndices())) return ad.cbind(bd, b.getNumCols()); else if(IColIndex.inOrder(b.getColIndices(), a.getColIndices())) return bd.cbind(ad, b.getNumCols()); else { final Pair<int[], int[]> r = IColIndex.reorderingIndexes(a.getColIndices(), b.getColIndices()); return cbindReorder(ad, bd, r.getKey(), r.getValue()); } } } } else if(ac.isConst()) { final double[] at = ((IContainDefaultTuple) a).getDefaultTuple(); if(bc.isSDC()) { final IDictionary bd = ((IContainADictionary) b).getDictionary(); final Pair<int[], int[]> r = IColIndex.reorderingIndexes(a.getColIndices(), b.getColIndices()); return combineConstLeftAll(at, bd, b.getNumCols(), r.getKey(), r.getValue()); } } throw new NotImplementedException("Not supporting combining dense: " + a + " " + b); } private static IDictionary cbindReorder(IDictionary a, IDictionary b, int[] ai, int[] bi) { final int nca = ai.length; final int ncb = bi.length; final int ra = a.getNumberOfValues(nca); final int rb = b.getNumberOfValues(ncb); final MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); final MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); if(ra != rb) throw new DMLCompressionException("Invalid cbind reorder, different sizes of dictionaries"); final MatrixBlock out = new MatrixBlock(ra, nca + ncb, false); for(int r = 0; r < ra; r++) {// each row // for(int c = 0; c < nca; c++) out.set(r, ai[c], ma.get(r, c)); for(int c = 0; c < ncb; c++) out.set(r, bi[c], mb.get(r, c)); } return new MatrixBlockDictionary(out); } /** * Combine the dictionaries as if the dictionaries contain the full spectrum of the combined data. 
* * @param a Left side dictionary * @param nca Number of columns left dictionary * @param b Right side dictionary * @param ncb Number of columns right dictionary * @return A combined dictionary */ public static IDictionary combineFullDictionaries(IDictionary a, int nca, IDictionary b, int ncb) { return combineFullDictionaries(a, nca, b, ncb, null); } public static IDictionary combineFullDictionaries(IDictionary a, IColIndex ai, IDictionary b, IColIndex bi, HashMapLongInt filter) { final int nca = ai.size(); final int ncb = bi.size(); return combineFullDictionaries(a, ai, nca, b, bi, ncb, filter); } /** * Combine the dictionaries as if the dictionaries only contain the values in the specified filter. * * @param a Left side dictionary * @param nca Number of columns left dictionary * @param b Right side dictionary * @param ncb Number of columns right dictionary * @param filter The mapping filter to not include all possible combinations in the output, this filter is allowed to * be null, that means the output is defaulting back to a full combine * @return A combined dictionary */ public static IDictionary combineFullDictionaries(IDictionary a, int nca, IDictionary b, int ncb, HashMapLongInt filter) { return combineFullDictionaries(a, null, nca, b, null, ncb, filter); } public static IDictionary combineFullDictionaries(IDictionary a, IColIndex ai, int nca, IDictionary b, IColIndex bi, int ncb, HashMapLongInt filter) { final int ra = a.getNumberOfValues(nca); final int rb = b.getNumberOfValues(ncb); final MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); final MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); final int filterSize = filter != null ? 
filter.size() : ra * rb; if(filterSize == 0) return null; final MatrixBlock out = new MatrixBlock(filterSize, nca + ncb, false); out.allocateBlock(); if(ai != null && bi != null && !IColIndex.inOrder(ai, bi)) { Pair<int[], int[]> reordering = IColIndex.reorderingIndexes(ai, bi); if(filter != null) // throw new NotImplementedException(); combineFullDictionariesOOOFilter(out, filter, ra, rb, nca, ncb, reordering.getKey(), reordering.getValue(), ma, mb); else combineFullDictionariesOOONoFilter(out, ra, rb, nca, ncb, reordering.getKey(), reordering.getValue(), ma, mb); } else { if(filter != null) combineFullDictionariesFilter(out, filter, ra, rb, nca, ncb, ma, mb); else combineFullDictionariesNoFilter(out, ra, rb, nca, ncb, ma, mb); } out.examSparsity(true); return new MatrixBlockDictionary(out); } private static void combineFullDictionariesFilter(MatrixBlock out, HashMapLongInt filter, int ra, int rb, int nca, int ncb, MatrixBlock ma, MatrixBlock mb) { for(KV k : filter) { final int r = (int) (k.k); final int o = k.v; int ia = r % ra; int ib = r / ra; for(int c = 0; c < nca; c++) out.set(o, c, ma.get(ia, c)); for(int c = 0; c < ncb; c++) out.set(o, c + nca, mb.get(ib, c)); } } private static void combineFullDictionariesOOOFilter(MatrixBlock out, HashMapLongInt filter, int ra, int rb, int nca, int ncb, int[] ai, int[] bi, MatrixBlock ma, MatrixBlock mb) { for(KV k : filter) { final int r = (int) (k.k); final int o = k.v; int ia = r % ra; int ib = r / ra; for(int c = 0; c < nca; c++) out.set(o, ai[c], ma.get(ia, c)); for(int c = 0; c < ncb; c++) out.set(o, bi[c], mb.get(ib, c)); } } private static void combineFullDictionariesOOONoFilter(MatrixBlock out, int ra, int rb, int nca, int ncb, int[] ai, int[] bi, MatrixBlock ma, MatrixBlock mb) { for(int r = 0; r < out.getNumRows(); r++) { int ia = r % ra; int ib = r / ra; for(int c = 0; c < nca; c++) out.set(r, ai[c], ma.get(ia, c)); for(int c = 0; c < ncb; c++) out.set(r, bi[c], mb.get(ib, c)); } } private static void 
combineFullDictionariesNoFilter(MatrixBlock out, int ra, int rb, int nca, int ncb, MatrixBlock ma, MatrixBlock mb) { for(int r = 0; r < out.getNumRows(); r++) { int ia = r % ra; int ib = r / ra; for(int c = 0; c < nca; c++) out.set(r, c, ma.get(ia, c)); for(int c = 0; c < ncb; c++) out.set(r, c + nca, mb.get(ib, c)); } } public static IDictionary combineSDCRightNoFilter(IDictionary a, int nca, IDictionary b, double[] tub) { return combineSDCRightNoFilter(a, null, nca, b, tub, null); } public static IDictionary combineSDCRightNoFilter(IDictionary a, IColIndex ai, int nca, IDictionary b, double[] tub, IColIndex bi) { if(ai != null || bi != null) throw new NotImplementedException(); final int ncb = tub.length; final int ra = a.getNumberOfValues(nca); final int rb = b.getNumberOfValues(ncb); final MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); final MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); final MatrixBlock out = new MatrixBlock(ra * (rb + 1), nca + ncb, false); out.allocateBlock(); for(int r = 0; r < ra; r++) { for(int c = 0; c < nca; c++) out.set(r, c, ma.get(r, c)); for(int c = 0; c < ncb; c++) out.set(r, c + nca, tub[c]); } for(int r = ra; r < out.getNumRows(); r++) { int ia = r % ra; int ib = r / ra - 1; for(int c = 0; c < nca; c++) // all good. 
out.set(r, c, ma.get(ia, c)); for(int c = 0; c < ncb; c++) out.set(r, c + nca, mb.get(ib, c)); } return new MatrixBlockDictionary(out); } public static IDictionary combineSDCRight(IDictionary a, IColIndex ai, IDictionary b, double[] tub, IColIndex bi, HashMapLongInt filter) { return combineSDCRight(a, ai, ai.size(), b, tub, bi, filter); } public static IDictionary combineSDCRight(IDictionary a, int nca, IDictionary b, double[] tub, HashMapLongInt filter) { return combineSDCRight(a, null, nca, b, tub, null, filter); } public static IDictionary combineSDCRight(IDictionary a, IColIndex ai, int nca, IDictionary b, double[] tub, IColIndex bi, HashMapLongInt filter) { if(filter == null) return combineSDCRightNoFilter(a, ai, nca, b, tub, bi); final int ncb = tub.length; final int ra = a.getNumberOfValues(nca); final int rb = b.getNumberOfValues(ncb); final MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); final MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); final MatrixBlock out = new MatrixBlock(filter.size(), nca + ncb, false); out.allocateBlock(); if(ai != null && bi != null) { Pair<int[], int[]> re = IColIndex.reorderingIndexes(ai, bi); combineSDCRightOOOFilter(out, nca, ncb, tub, ra, rb, ma, mb, re.getKey(), re.getValue(), filter); } else { combineSDCRightFilter(out, nca, ncb, tub, ra, rb, ma, mb, filter); } return new MatrixBlockDictionary(out); } private static void combineSDCRightFilter(MatrixBlock out, int nca, int ncb, double[] tub, int ra, int rb, MatrixBlock ma, MatrixBlock mb, HashMapLongInt filter) { for(int r = 0; r < ra; r++) { int o = filter.get(r); if(o != -1) { for(int c = 0; c < nca; c++) out.set(o, c, ma.get(r, c)); for(int c = 0; c < ncb; c++) out.set(o, c + nca, tub[c]); } } for(int r = ra; r < ra * rb + ra; r++) { int o = filter.get(r); if(o != -1) { int ia = r % ra; int ib = r / ra - 1; for(int c = 0; c < nca; c++) // all good. 
out.set(o, c, ma.get(ia, c)); for(int c = 0; c < ncb; c++) out.set(o, c + nca, mb.get(ib, c)); } } } private static void combineSDCRightOOOFilter(MatrixBlock out, int nca, int ncb, double[] tub, int ra, int rb, MatrixBlock ma, MatrixBlock mb, int[] ai, int[] bi, HashMapLongInt filter) { for(int r = 0; r < ra; r++) { int o = filter.get(r); if(o != -1) { for(int c = 0; c < nca; c++) out.set(o, ai[c], ma.get(r, c)); for(int c = 0; c < ncb; c++) out.set(o, bi[c], tub[c]); } } for(int r = ra; r < ra * rb + ra; r++) { int o = filter.get(r); if(o != -1) { int ia = r % ra; int ib = r / ra - 1; for(int c = 0; c < nca; c++) // all good. out.set(o, ai[c], ma.get(ia, c)); for(int c = 0; c < ncb; c++) out.set(o, bi[c], mb.get(ib, c)); } } } public static IDictionary combineSDCNoFilter(IDictionary a, double[] tua, IDictionary b, double[] tub) { return combineSDCNoFilter(a, tua, null, b, tub, null); } public static IDictionary combineSDCNoFilter(IDictionary a, double[] tua, IColIndex ai, IDictionary b, double[] tub, IColIndex bi) { final int nca = tua.length; final int ncb = tub.length; final int ra = a.getNumberOfValues(nca); final int rb = b.getNumberOfValues(ncb); final MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); final MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); final MatrixBlock out = new MatrixBlock((ra + 1) * (rb + 1), nca + ncb, false); out.allocateBlock(); if(ai != null || bi != null) { final Pair<int[], int[]> re = IColIndex.reorderingIndexes(ai, bi); combineSDCNoFilterOOO(nca, ncb, tua, tub, out, ma, mb, ra, rb, re.getKey(), re.getValue()); } else combineSDCNoFilter(nca, ncb, tua, tub, out, ma, mb, ra, rb); return new MatrixBlockDictionary(out); } private static void combineSDCNoFilter(int nca, int ncb, double[] tua, double[] tub, MatrixBlock out, MatrixBlock ma, MatrixBlock mb, int ra, int rb) { // 0 row both default tuples for(int c = 0; c < nca; c++) out.set(0, c, tua[c]); for(int c = 0; c < ncb; c++) out.set(0, c + nca, tub[c]); // default case for b 
and all cases for a. for(int r = 1; r < ra + 1; r++) { for(int c = 0; c < nca; c++) out.set(r, c, ma.get(r - 1, c)); for(int c = 0; c < ncb; c++) out.set(r, c + nca, tub[c]); } for(int r = ra + 1; r < out.getNumRows(); r++) { final int ia = r % (ra + 1) - 1; final int ib = r / (ra + 1) - 1; if(ia == -1) for(int c = 0; c < nca; c++) out.set(r, c, tua[c]); else for(int c = 0; c < nca; c++) out.set(r, c, ma.get(ia, c)); for(int c = 0; c < ncb; c++) // all good here. out.set(r, c + nca, mb.get(ib, c)); } } private static void combineSDCNoFilterOOO(int nca, int ncb, double[] tua, double[] tub, MatrixBlock out, MatrixBlock ma, MatrixBlock mb, int ra, int rb, int[] ai, int[] bi) { // 0 row both default tuples for(int c = 0; c < nca; c++) out.set(0, ai[c], tua[c]); for(int c = 0; c < ncb; c++) out.set(0, bi[c], tub[c]); // default case for b and all cases for a. for(int r = 1; r < ra + 1; r++) { for(int c = 0; c < nca; c++) out.set(r, ai[c], ma.get(r - 1, c)); for(int c = 0; c < ncb; c++) out.set(r, bi[c], tub[c]); } for(int r = ra + 1; r < out.getNumRows(); r++) { final int ia = r % (ra + 1) - 1; final int ib = r / (ra + 1) - 1; if(ia == -1) for(int c = 0; c < nca; c++) out.set(r, ai[c], tua[c]); else for(int c = 0; c < nca; c++) out.set(r, ai[c], ma.get(ia, c)); for(int c = 0; c < ncb; c++) // all good here. 
out.set(r, bi[c], mb.get(ib, c)); } } public static IDictionary combineSDCFilter(IDictionary a, double[] tua, IDictionary b, double[] tub, HashMapLongInt filter) { return combineSDCFilter(a, tua, null, b, tub, null, filter); } public static IDictionary combineSDCFilter(IDictionary a, double[] tua, IColIndex ai, IDictionary b, double[] tub, IColIndex bi, HashMapLongInt filter) { if(filter == null) return combineSDCNoFilter(a, tua, ai, b, tub, bi); final int nca = tua.length; final int ncb = tub.length; final int ra = a.getNumberOfValues(nca); final int rb = b.getNumberOfValues(ncb); final MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); final MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); final MatrixBlock out = new MatrixBlock(filter.size(), nca + ncb, false); out.allocateBlock(); if(ai != null && bi != null) { Pair<int[], int[]> re = IColIndex.reorderingIndexes(ai, bi); combineSDCFilterOOO(filter, nca, ncb, tua, tub, out, ma, mb, ra, rb, re.getKey(), re.getValue()); } else combineSDCFilter(filter, nca, ncb, tua, tub, out, ma, mb, ra, rb); return new MatrixBlockDictionary(out); } private static void combineSDCFilter(HashMapLongInt filter, int nca, int ncb, double[] tua, double[] tub, MatrixBlock out, MatrixBlock ma, MatrixBlock mb, int ra, int rb) { // 0 row both default tuples final int o0 = filter.get(0); if(o0 != -1) { for(int c = 0; c < nca; c++) out.set(o0, c, tua[c]); for(int c = 0; c < ncb; c++) out.set(o0, c + nca, tub[c]); } // default case for b and all cases for a. 
for(int r = 1; r < ra + 1; r++) { final int o = filter.get(r); if(o != -1) { for(int c = 0; c < nca; c++) out.set(o, c, ma.get(r - 1, c)); for(int c = 0; c < ncb; c++) out.set(o, c + nca, tub[c]); } } for(int r = ra + 1; r < ra * rb + ra + rb + 1; r++) { final int o = filter.get(r); if(o != -1) { final int ia = r % (ra + 1) - 1; final int ib = r / (ra + 1) - 1; if(ia == -1) for(int c = 0; c < nca; c++) out.set(o, c, tua[c]); else for(int c = 0; c < nca; c++) out.set(o, c, ma.get(ia, c)); for(int c = 0; c < ncb; c++) // all good here. out.set(o, c + nca, mb.get(ib, c)); } } } private static void combineSDCFilterOOO(HashMapLongInt filter, int nca, int ncb, double[] tua, double[] tub, MatrixBlock out, MatrixBlock ma, MatrixBlock mb, int ra, int rb, int[] ai, int[] bi) { // 0 row both default tuples final int o0 = filter.get(0); if(o0 != -1) { for(int c = 0; c < nca; c++) out.set(o0, ai[c], tua[c]); for(int c = 0; c < ncb; c++) out.set(o0, bi[c], tub[c]); } // default case for b and all cases for a. for(int r = 1; r < ra + 1; r++) { final int o = filter.get(r); if(o != -1) { for(int c = 0; c < nca; c++) out.set(o, ai[c], ma.get(r - 1, c)); for(int c = 0; c < ncb; c++) out.set(o, bi[c], tub[c]); } } for(int r = ra + 1; r < ra * rb + ra + rb + 1; r++) { final int o = filter.get(r); if(o != -1) { final int ia = r % (ra + 1) - 1; final int ib = r / (ra + 1) - 1; if(ia == -1) for(int c = 0; c < nca; c++) out.set(o, ai[c], tua[c]); else for(int c = 0; c < nca; c++) out.set(o, ai[c], ma.get(ia, c)); for(int c = 0; c < ncb; c++) // all good here. out.set(o, bi[c], mb.get(ib, c)); } } } private static IDictionary combineSparseConstSparseRet(IDictionary a, int nca, double[] tub, int[] ai, int[] bi) { final int ncb = tub.length; final int ra = a.getNumberOfValues(nca); MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); MatrixBlock out = new MatrixBlock(ra, nca + ncb, false); out.allocateBlock(); // default case for b and all cases for a. 
for(int r = 0; r < ra; r++) { for(int c = 0; c < nca; c++) out.set(r, ai[c], ma.get(r, c)); for(int c = 0; c < ncb; c++) out.set(r, bi[c], tub[c]); } return new MatrixBlockDictionary(out); } private static IDictionary combineSparseConstSparseRet(IDictionary a, int nca, double[] tub, int[] ai, int[] bi, HashMapLongInt filter) { if(filter == null) return combineSparseConstSparseRet(a, nca, tub, ai, bi); else throw new NotImplementedException(); // final int ncb = tub.length; // final int ra = a.getNumberOfValues(nca); // MatrixBlock ma = a.getMBDict(nca).getMatrixBlock(); // MatrixBlock out = new MatrixBlock(ra, nca + ncb, false); // out.allocateBlock(); // // default case for b and all cases for a. // for(int r = 0; r < ra; r++) { // for(int c = 0; c < nca; c++) // out.set(r, c, ma.get(r, c)); // for(int c = 0; c < ncb; c++) // out.set(r, c + nca, tub[c]); // } // return new MatrixBlockDictionary(out); } private static IDictionary combineConstLeftAll(double[] tua, IDictionary b, int ncb, int[] ai, int[] bi) { final int nca = tua.length; final int rb = b.getNumberOfValues(ncb); MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); MatrixBlock out = new MatrixBlock(rb, nca + ncb, false); out.allocateBlock(); // default case for b and all cases for a. for(int r = 0; r < rb; r++) { for(int c = 0; c < nca; c++) out.set(r, ai[c], tua[c]); for(int c = 0; c < ncb; c++) out.set(r, bi[c], mb.get(r, c)); } return new MatrixBlockDictionary(out); } private static IDictionary combineConstLeft(double[] tua, IDictionary b, int ncb, int[] ai, int[] bi, HashMapLongInt filter) { if(filter == null) return combineConstLeftAll(tua, b, ncb, ai, bi); else throw new NotImplementedException(); // final int nca = tua.length; // final int rb = b.getNumberOfValues(ncb); // MatrixBlock mb = b.getMBDict(ncb).getMatrixBlock(); // MatrixBlock out = new MatrixBlock(rb, nca + ncb, false); // out.allocateBlock(); // // default case for b and all cases for a. 
// for(int r = 0; r < rb; r++) { // for(int c = 0; c < nca; c++) // out.set(r, c, tua[c]); // for(int c = 0; c < ncb; c++) // out.set(r, c + nca, mb.get(r, c)); // } // return new MatrixBlockDictionary(out); } public static IDictionary cBindDictionaries(int nCol, List<IDictionary> dicts) { MatrixBlockDictionary baseDict = dicts.get(0).getMBDict(nCol); MatrixBlock base = baseDict == null ? new MatrixBlock(1, nCol, true) : baseDict.getMatrixBlock(); MatrixBlock[] others = new MatrixBlock[dicts.size() - 1]; for(int i = 1; i < dicts.size(); i++) { MatrixBlockDictionary otherDict = dicts.get(i).getMBDict(nCol); MatrixBlock otherBase = otherDict == null ? new MatrixBlock(1, nCol, true) : otherDict.getMatrixBlock(); others[i - 1] = otherBase; } MatrixBlock ret = base.append(others, null, true); return MatrixBlockDictionary.create(ret, true); } // public static IDictionary cBindDictionaries(List<Pair<Integer, IDictionary>> dicts) { // MatrixBlock base = dicts.get(0).getValue().getMBDict(dicts.get(0).getKey()).getMatrixBlock(); // MatrixBlock[] others = new MatrixBlock[dicts.size() - 1]; // for(int i = 1; i < dicts.size(); i++) { // Pair<Integer, IDictionary> p = dicts.get(i); // others[i - 1] = p.getValue().getMBDict(p.getKey()).getMatrixBlock(); // } // MatrixBlock ret = base.append(others, null, true); // return new MatrixBlockDictionary(ret); // } public static IDictionary cBindDictionaries(IDictionary left, IDictionary right, int nColLeft, int nColRight) { MatrixBlockDictionary base = left.getMBDict(nColLeft); MatrixBlockDictionary add = right.getMBDict(nColRight); MatrixBlock a = base == null ? (add != null ? new MatrixBlock(add.getNumberOfValues(nColRight), nColLeft, true) : new MatrixBlock(1, nColLeft, true)) : base.getMatrixBlock(); MatrixBlock b = add == null ? new MatrixBlock(a.getNumRows(), nColRight, true) : add.getMatrixBlock(); MatrixBlock ret = a.append(b, null, true); return MatrixBlockDictionary.create(ret, true); } }
apache/druid
35,378
indexing-service/src/test/java/org/apache/druid/indexing/common/task/concurrent/ConcurrentReplaceAndStreamingAppendTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexing.common.task.concurrent; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import org.apache.druid.indexing.common.MultipleFileTaskReportFileWriter; import org.apache.druid.indexing.common.TaskLock; import org.apache.druid.indexing.common.TaskStorageDirTracker; import org.apache.druid.indexing.common.TaskToolbox; import org.apache.druid.indexing.common.TaskToolboxFactory; import org.apache.druid.indexing.common.actions.RetrieveUsedSegmentsAction; import org.apache.druid.indexing.common.actions.TaskActionClient; import org.apache.druid.indexing.common.actions.TaskActionClientFactory; import org.apache.druid.indexing.common.config.TaskConfig; import org.apache.druid.indexing.common.config.TaskConfigBuilder; import org.apache.druid.indexing.common.task.IngestionTestBase; import org.apache.druid.indexing.common.task.NoopTask; import org.apache.druid.indexing.common.task.NoopTaskContextEnricher; import org.apache.druid.indexing.common.task.Task; import 
org.apache.druid.indexing.common.task.TestAppenderatorsManager; import org.apache.druid.indexing.overlord.SegmentPublishResult; import org.apache.druid.indexing.overlord.Segments; import org.apache.druid.indexing.overlord.TaskQueue; import org.apache.druid.indexing.overlord.TaskRunner; import org.apache.druid.indexing.overlord.TestTaskToolboxFactory; import org.apache.druid.indexing.overlord.ThreadingTaskRunner; import org.apache.druid.indexing.overlord.config.DefaultTaskConfig; import org.apache.druid.indexing.overlord.config.TaskLockConfig; import org.apache.druid.indexing.overlord.config.TaskQueueConfig; import org.apache.druid.indexing.overlord.supervisor.SupervisorManager; import org.apache.druid.indexing.worker.config.WorkerConfig; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.metadata.PendingSegmentRecord; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.TestDataSource; import org.apache.druid.segment.column.ColumnConfig; import org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec; import org.apache.druid.server.DruidNode; import org.apache.druid.server.metrics.NoopServiceEmitter; import org.apache.druid.tasklogs.NoopTaskLogs; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.SegmentId; import org.apache.druid.timeline.partition.NumberedShardSpec; import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMock; import org.joda.time.Interval; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import 
java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; /** * Contains tests to verify behaviour of concurrently running REPLACE and APPEND * tasks on the same interval of a datasource. * <p> * The tests verify the interleaving of the following actions: * <ul> * <li>LOCK: Acquisition of a lock on an interval by a replace task</li> * <li>ALLOCATE: Allocation of a pending segment by an append task</li> * <li>REPLACE: Commit of segments created by a replace task</li> * <li>APPEND: Commit of segments created by an append task</li> * </ul> */ public class ConcurrentReplaceAndStreamingAppendTest extends IngestionTestBase { /** * The version used by append jobs when no previous replace job has run on an interval. */ private static final String SEGMENT_V0 = DateTimes.EPOCH.toString(); private static final Interval JAN_23 = Intervals.of("2023-01/2023-02"); private static final Interval FIRST_OF_JAN_23 = Intervals.of("2023-01-01/2023-01-02"); private TaskQueue taskQueue; private TaskActionClientFactory taskActionClientFactory; private TaskActionClient dummyTaskActionClient; private final List<ActionsTestTask> runningTasks = new ArrayList<>(); private ActionsTestTask appendTask; private ActionsTestTask replaceTask; private final AtomicInteger groupId = new AtomicInteger(0); private final SupervisorManager supervisorManager = EasyMock.mock(SupervisorManager.class); private Capture<String> supervisorId; private Capture<PendingSegmentRecord> pendingSegment; private Map<String, Map<Interval, Set<Object>>> versionToIntervalToLoadSpecs; private Map<String, Object> parentSegmentToLoadSpec; @Override @Before public void setUpIngestionTestBase() throws IOException { EasyMock.reset(supervisorManager); EasyMock.expect(supervisorManager.getActiveSupervisorIdForDatasourceWithAppendLock(TestDataSource.WIKI)) .andReturn(Optional.of(TestDataSource.WIKI)).anyTimes(); super.setUpIngestionTestBase(); final 
TaskConfig taskConfig = new TaskConfigBuilder().build(); taskActionClientFactory = createActionClientFactory(); dummyTaskActionClient = taskActionClientFactory.create(NoopTask.create()); final WorkerConfig workerConfig = new WorkerConfig().setCapacity(10); TaskRunner taskRunner = new ThreadingTaskRunner( createToolboxFactory(taskConfig, taskActionClientFactory), taskConfig, workerConfig, new NoopTaskLogs(), getObjectMapper(), new TestAppenderatorsManager(), new MultipleFileTaskReportFileWriter(), new DruidNode("middleManager", "host", false, 8091, null, true, false), TaskStorageDirTracker.fromConfigs(workerConfig, taskConfig) ); taskQueue = new TaskQueue( new TaskLockConfig(), new TaskQueueConfig(null, new Period(0L), null, null, null, null), new DefaultTaskConfig(), getTaskStorage(), taskRunner, taskActionClientFactory, getLockbox(), new NoopServiceEmitter(), getObjectMapper(), new NoopTaskContextEnricher() ); runningTasks.clear(); taskQueue.start(); groupId.set(0); appendTask = createAndStartTask(); supervisorId = Capture.newInstance(CaptureType.ALL); pendingSegment = Capture.newInstance(CaptureType.ALL); EasyMock.expect(supervisorManager.registerUpgradedPendingSegmentOnSupervisor( EasyMock.capture(supervisorId), EasyMock.capture(pendingSegment) )).andReturn(true).anyTimes(); replaceTask = createAndStartTask(); EasyMock.replay(supervisorManager); versionToIntervalToLoadSpecs = new HashMap<>(); parentSegmentToLoadSpec = new HashMap<>(); } @After public void tearDown() { verifyVersionIntervalLoadSpecUniqueness(); for (ActionsTestTask task : runningTasks) { task.finishRunAndGetStatus(); } } @Test public void testLockReplaceAllocateAppend() { final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10); final SegmentIdWithShardSpec pendingSegment = 
appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(segmentV10.getVersion(), pendingSegment.getVersion()); final DataSegment segmentV11 = asSegment(pendingSegment); commitAppendSegments(segmentV11); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); } @Test public void testLockAllocateAppendDayReplaceDay() { final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV01); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); // Verify that the segment appended to v0 gets upgraded to v1 final DataSegment segmentV11 = DataSegment.builder(segmentV01) .shardSpec(new NumberedShardSpec(1, 1)) .version(v1).build(); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); } @Test public void testLockAllocateReplaceDayAppendDay() { final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); final DataSegment segmentV01 = 
asSegment(pendingSegment); commitAppendSegments(segmentV01); // Verify that the segment appended to v0 gets upgraded to v1 final DataSegment segmentV11 = DataSegment.builder(segmentV01) .shardSpec(new NumberedShardSpec(1, 1)) .version(v1).build(); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); } @Test public void testAllocateLockReplaceDayAppendDay() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); // Verify that the segment appended to v0 gets upgraded to v1 final DataSegment segmentV11 = DataSegment.builder(segmentV01) .shardSpec(new NumberedShardSpec(1, 1)) .version(v1).build(); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); } @Test public void testAllocateLockAppendDayReplaceDay() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV01); final DataSegment segmentV10 = 
createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); replaceTask.finishRunAndGetStatus(); // Verify that the segment appended to v0 gets upgraded to v1 final DataSegment segmentV11 = DataSegment.builder(segmentV01) .shardSpec(new NumberedShardSpec(1, 1)) .version(v1).build(); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); } @Test public void testAllocateAppendDayLockReplaceDay() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV01); final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); // Verify that the segment appended to v0 gets fully overshadowed verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); } @Test public void testLockReplaceMonthAllocateAppendDay() { String v1 = replaceTask.acquireReplaceLockOn(JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(JAN_23, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); // Verify that the allocated segment takes the version and interval of previous replace final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(JAN_23, pendingSegment.getInterval()); Assert.assertEquals(v1, pendingSegment.getVersion()); final DataSegment 
segmentV11 = asSegment(pendingSegment); commitAppendSegments(segmentV11); verifyIntervalHasUsedSegments(JAN_23, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(JAN_23, segmentV10, segmentV11); } @Test public void testLockAllocateAppendDayReplaceMonth() { final String v1 = replaceTask.acquireReplaceLockOn(JAN_23).getVersion(); final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV01); final DataSegment segmentV10 = createSegment(JAN_23, v1); commitReplaceSegments(segmentV10); // Verify that append segment gets upgraded to replace version final DataSegment segmentV11 = DataSegment.builder(segmentV01) .version(v1) .interval(segmentV10.getInterval()) .shardSpec(new NumberedShardSpec(1, 1)) .build(); verifyIntervalHasUsedSegments(JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(JAN_23, segmentV10, segmentV11); } @Test public void testLockAllocateReplaceMonthAppendDay() { final String v1 = replaceTask.acquireReplaceLockOn(JAN_23).getVersion(); final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV10 = createSegment(JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); // Verify that 
append segment gets upgraded to replace version final DataSegment segmentV11 = DataSegment.builder(segmentV01) .version(v1) .interval(segmentV10.getInterval()) .shardSpec(new NumberedShardSpec(1, 1)) .build(); verifyIntervalHasUsedSegments(JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(JAN_23, segmentV10, segmentV11); } @Test public void testAllocateLockReplaceMonthAppendDay() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final String v1 = replaceTask.acquireReplaceLockOn(JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); // Verify that append segment gets upgraded to replace version final DataSegment segmentV11 = DataSegment.builder(segmentV01) .version(v1) .interval(segmentV10.getInterval()) .shardSpec(new NumberedShardSpec(1, 1)) .build(); verifyIntervalHasUsedSegments(JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(JAN_23, segmentV10, segmentV11); } @Test public void testAllocateLockAppendDayReplaceMonth() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final String v1 = replaceTask.acquireReplaceLockOn(JAN_23).getVersion(); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01); 
verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV01); final DataSegment segmentV10 = createSegment(JAN_23, v1); commitReplaceSegments(segmentV10); // Verify that append segment gets upgraded to replace version final DataSegment segmentV11 = DataSegment.builder(segmentV01) .version(v1) .interval(segmentV10.getInterval()) .shardSpec(new NumberedShardSpec(1, 1)) .build(); verifyIntervalHasUsedSegments(JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(JAN_23, segmentV10, segmentV11); } @Test public void testAllocateAppendDayLockReplaceMonth() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV01); final String v1 = replaceTask.acquireReplaceLockOn(JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(JAN_23, v1); commitReplaceSegments(segmentV10); // Verify that the old segment gets completely replaced verifyIntervalHasUsedSegments(JAN_23, segmentV01, segmentV10); verifyIntervalHasVisibleSegments(JAN_23, segmentV10); } @Test public void testLockReplaceDayAllocateAppendMonth() { final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); replaceTask.commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); // Verify that an APPEND lock cannot be acquired on month TaskLock appendLock = appendTask.acquireAppendLockOn(JAN_23); Assert.assertNull(appendLock); // Verify that new segment gets allocated with DAY granularity 
even though preferred was MONTH final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(JAN_23.getStart(), Granularities.MONTH); Assert.assertEquals(v1, pendingSegment.getVersion()); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); final DataSegment segmentV11 = asSegment(pendingSegment); commitAppendSegments(segmentV11); verifyIntervalHasUsedSegments(JAN_23, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(JAN_23, segmentV10, segmentV11); } @Test public void testLockAllocateAppendMonthReplaceDay() { final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); // Verify that an APPEND lock cannot be acquired on month TaskLock appendLock = appendTask.acquireAppendLockOn(JAN_23); Assert.assertNull(appendLock); // Verify that the segment is allocated for DAY granularity final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(JAN_23.getStart(), Granularities.MONTH); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV01); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); // Verify that append segment gets upgraded to replace version final DataSegment segmentV11 = DataSegment.builder(segmentV01) .version(v1) .interval(segmentV10.getInterval()) .shardSpec(new NumberedShardSpec(1, 1)) .build(); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); } @Test public void testLockAllocateReplaceDayAppendMonth() { final String v1 = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23).getVersion(); // Verify that an APPEND lock cannot be 
acquired on month TaskLock appendLock = appendTask.acquireAppendLockOn(JAN_23); Assert.assertNull(appendLock); // Verify that the segment is allocated for DAY granularity instead of MONTH final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(JAN_23.getStart(), Granularities.MONTH); Assert.assertEquals(FIRST_OF_JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV10); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); final DataSegment segmentV11 = DataSegment.builder(segmentV01) .interval(FIRST_OF_JAN_23) .version(v1) .shardSpec(new NumberedShardSpec(1, 1)) .build(); verifyIntervalHasUsedSegments(FIRST_OF_JAN_23, segmentV01, segmentV10, segmentV11); verifyIntervalHasVisibleSegments(FIRST_OF_JAN_23, segmentV10, segmentV11); } @Test public void testAllocateLockReplaceDayAppendMonth() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(JAN_23.getStart(), Granularities.MONTH); Assert.assertEquals(JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); // Verify that replace lock cannot be acquired on MONTH TaskLock replaceLock = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23); Assert.assertNull(replaceLock); // Verify that segment cannot be committed since there is no lock final DataSegment segmentV10 = createSegment(FIRST_OF_JAN_23, SEGMENT_V0); final ISE exception = Assert.assertThrows(ISE.class, () -> commitReplaceSegments(segmentV10)); final Throwable throwable = Throwables.getRootCause(exception); Assert.assertEquals( StringUtils.format( "Segment IDs[[%s]] are not covered by locks[[]] for task[%s]", segmentV10.getId(), replaceTask.getId() ), 
throwable.getMessage() ); final DataSegment segmentV01 = asSegment(pendingSegment); commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(JAN_23, segmentV01); verifyIntervalHasVisibleSegments(JAN_23, segmentV01); } @Test public void testAllocateAppendMonthLockReplaceDay() { final SegmentIdWithShardSpec pendingSegment = appendTask.allocateSegmentForTimestamp(JAN_23.getStart(), Granularities.MONTH); Assert.assertEquals(JAN_23, pendingSegment.getInterval()); Assert.assertEquals(SEGMENT_V0, pendingSegment.getVersion()); final DataSegment segmentV01 = asSegment(pendingSegment); appendTask.commitAppendSegments(segmentV01); verifyIntervalHasUsedSegments(JAN_23, segmentV01); verifyIntervalHasVisibleSegments(JAN_23, segmentV01); // Verify that replace lock cannot be acquired on DAY as MONTH is already locked final TaskLock replaceLock = replaceTask.acquireReplaceLockOn(FIRST_OF_JAN_23); Assert.assertNull(replaceLock); } @Test public void testLockAllocateDayReplaceMonthAllocateAppend() { final SegmentIdWithShardSpec pendingSegmentV0 = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); final String v1 = replaceTask.acquireReplaceLockOn(JAN_23).getVersion(); final DataSegment segmentV10 = createSegment(JAN_23, v1); commitReplaceSegments(segmentV10); verifyIntervalHasUsedSegments(JAN_23, segmentV10); final SegmentIdWithShardSpec pendingSegmentV1 = appendTask.allocateSegmentForTimestamp(FIRST_OF_JAN_23.getStart(), Granularities.DAY); Assert.assertEquals(segmentV10.getVersion(), pendingSegmentV1.getVersion()); final DataSegment segmentV00 = asSegment(pendingSegmentV0); final DataSegment segmentV11 = asSegment(pendingSegmentV1); Set<DataSegment> appendSegments = commitAppendSegments(segmentV00, segmentV11) .getSegments(); Assert.assertEquals(3, appendSegments.size()); // Segment V11 is committed Assert.assertTrue(appendSegments.remove(segmentV11)); // Segment V00 is also committed Assert.assertTrue(appendSegments.remove(segmentV00)); 
// Segment V00 is upgraded to v1 with MONTH granularlity at the time of commit as V12 final DataSegment segmentV12 = Iterables.getOnlyElement(appendSegments); Assert.assertEquals(v1, segmentV12.getVersion()); Assert.assertEquals(JAN_23, segmentV12.getInterval()); Assert.assertEquals(segmentV00.getLoadSpec(), segmentV12.getLoadSpec()); verifyIntervalHasUsedSegments(JAN_23, segmentV00, segmentV10, segmentV11, segmentV12); verifyIntervalHasVisibleSegments(JAN_23, segmentV10, segmentV11, segmentV12); } private static DataSegment asSegment(SegmentIdWithShardSpec pendingSegment) { final SegmentId id = pendingSegment.asSegmentId(); return DataSegment.builder(id) .loadSpec(Collections.singletonMap(id.toString(), id.toString())) .shardSpec(pendingSegment.getShardSpec()) .build(); } private void verifyIntervalHasUsedSegments(Interval interval, DataSegment... expectedSegments) { verifySegments(interval, Segments.INCLUDING_OVERSHADOWED, expectedSegments); } private void verifyIntervalHasVisibleSegments(Interval interval, DataSegment... expectedSegments) { verifySegments(interval, Segments.ONLY_VISIBLE, expectedSegments); } private void verifySegments(Interval interval, Segments visibility, DataSegment... 
expectedSegments) { try { Collection<DataSegment> allUsedSegments = dummyTaskActionClient.submit( new RetrieveUsedSegmentsAction( TestDataSource.WIKI, ImmutableList.of(interval), visibility ) ); Assert.assertEquals(Sets.newHashSet(expectedSegments), Sets.newHashSet(allUsedSegments)); } catch (IOException e) { throw new ISE(e, "Error while fetching used segments in interval[%s]", interval); } } private TaskToolboxFactory createToolboxFactory( TaskConfig taskConfig, TaskActionClientFactory taskActionClientFactory ) { TestTaskToolboxFactory.Builder builder = new TestTaskToolboxFactory.Builder() .setConfig(taskConfig) .setIndexIO(new IndexIO(getObjectMapper(), ColumnConfig.DEFAULT)) .setTaskActionClientFactory(taskActionClientFactory); return new TestTaskToolboxFactory(builder) { @Override public TaskToolbox build(TaskConfig config, Task task) { return createTaskToolbox(config, task, supervisorManager); } }; } private DataSegment createSegment(Interval interval, String version) { SegmentId id = SegmentId.of(TestDataSource.WIKI, interval, version, null); return DataSegment.builder() .dataSource(TestDataSource.WIKI) .interval(interval) .version(version) .loadSpec(Collections.singletonMap(id.toString(), id.toString())) .size(100) .build(); } private ActionsTestTask createAndStartTask() { ActionsTestTask task = new ActionsTestTask(TestDataSource.WIKI, "test_" + groupId.incrementAndGet(), taskActionClientFactory); taskQueue.add(task); runningTasks.add(task); return task; } private void commitReplaceSegments(DataSegment... dataSegments) { replaceTask.commitReplaceSegments(dataSegments); for (int i = 0; i < supervisorId.getValues().size(); i++) { announceUpgradedPendingSegment(pendingSegment.getValues().get(i)); } supervisorId.reset(); pendingSegment.reset(); replaceTask.finishRunAndGetStatus(); } private SegmentPublishResult commitAppendSegments(DataSegment... 
dataSegments) { SegmentPublishResult result = appendTask.commitAppendSegments(dataSegments); result.getSegments().forEach(this::unannounceUpgradedPendingSegment); for (DataSegment segment : dataSegments) { parentSegmentToLoadSpec.put(segment.getId().toString(), Iterables.getOnlyElement(segment.getLoadSpec().values())); } appendTask.finishRunAndGetStatus(); return result; } private void announceUpgradedPendingSegment(PendingSegmentRecord pendingSegment) { appendTask.getAnnouncedSegmentsToParentSegments() .put(pendingSegment.getId().asSegmentId(), pendingSegment.getUpgradedFromSegmentId()); } private void unannounceUpgradedPendingSegment( DataSegment segment ) { appendTask.getAnnouncedSegmentsToParentSegments() .remove(segment.getId()); } private void verifyVersionIntervalLoadSpecUniqueness() { for (DataSegment usedSegment : getAllUsedSegments()) { final String version = usedSegment.getVersion(); final Interval interval = usedSegment.getInterval(); final Object loadSpec = Iterables.getOnlyElement(usedSegment.getLoadSpec().values()); Map<Interval, Set<Object>> intervalToLoadSpecs = versionToIntervalToLoadSpecs.computeIfAbsent(version, v -> new HashMap<>()); Set<Object> loadSpecs = intervalToLoadSpecs.computeIfAbsent(interval, i -> new HashSet<>()); Assert.assertFalse(loadSpecs.contains(loadSpec)); loadSpecs.add(loadSpec); } for (Map.Entry<SegmentId, String> entry : appendTask.getAnnouncedSegmentsToParentSegments().entrySet()) { final String version = entry.getKey().getVersion(); final Interval interval = entry.getKey().getInterval(); final Object loadSpec = parentSegmentToLoadSpec.get(entry.getValue()); Map<Interval, Set<Object>> intervalToLoadSpecs = versionToIntervalToLoadSpecs.computeIfAbsent(version, v -> new HashMap<>()); Set<Object> loadSpecs = intervalToLoadSpecs.computeIfAbsent(interval, i -> new HashSet<>()); Assert.assertFalse(loadSpecs.contains(loadSpec)); loadSpecs.add(loadSpec); } } private Collection<DataSegment> getAllUsedSegments() { try { return 
dummyTaskActionClient.submit( new RetrieveUsedSegmentsAction( TestDataSource.WIKI, ImmutableList.of(Intervals.ETERNITY), Segments.INCLUDING_OVERSHADOWED ) ); } catch (IOException e) { throw new RuntimeException(e); } } }
apache/ignite
34,983
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridCacheColocatedDebugTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.cache.store.CacheStore; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.processors.cache.GridCacheAdapter; import org.apache.ignite.internal.processors.cache.GridCacheContext; import 
org.apache.ignite.internal.processors.cache.GridCacheEntryEx; import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate; import org.apache.ignite.internal.processors.cache.GridCacheTestStore; import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.testframework.junits.WithSystemProperty; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; import org.junit.Test; import static org.apache.ignite.IgniteSystemProperties.IGNITE_TO_STRING_MAX_LENGTH; import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC; import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; /** * Tests for colocated cache. */ public class GridCacheColocatedDebugTest extends GridCommonAbstractTest { /** Test thread count. */ private static final int THREAD_CNT = 10; /** Number of iterations (adjust for prolonged debugging). */ public static final int MAX_ITER_CNT = 10_000; /** Store enable flag. 
*/ private boolean storeEnabled; /** {@inheritDoc} */ @SuppressWarnings("unchecked") @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); CacheConfiguration cacheCfg = defaultCacheConfiguration(); cacheCfg.setCacheMode(PARTITIONED); cacheCfg.setNearConfiguration(null); cacheCfg.setAffinity(new RendezvousAffinityFunction(false, 30)); cacheCfg.setBackups(1); cacheCfg.setWriteSynchronizationMode(FULL_SYNC); if (storeEnabled) { cacheCfg.setCacheStoreFactory(singletonFactory(new GridCacheTestStore())); cacheCfg.setReadThrough(true); cacheCfg.setWriteThrough(true); cacheCfg.setLoadPreviousValue(true); } else cacheCfg.setCacheStoreFactory(null); cfg.setCacheConfiguration(cacheCfg); return cfg; } /** * @throws Exception If failed. */ @Test public void testSimplestPessimistic() throws Exception { checkSinglePut(false, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testSimpleOptimistic() throws Exception { checkSinglePut(true, OPTIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testReentry() throws Exception { checkReentry(PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedInTxSeparatePessimistic() throws Exception { checkDistributedPut(true, true, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedInTxPessimistic() throws Exception { checkDistributedPut(true, false, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedSeparatePessimistic() throws Exception { checkDistributedPut(false, true, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedPessimistic() throws Exception { checkDistributedPut(false, false, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. 
*/ @Test public void testDistributedNonLocalInTxSeparatePessimistic() throws Exception { checkNonLocalPuts(true, true, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedNonLocalInTxPessimistic() throws Exception { checkNonLocalPuts(true, false, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedNonLocalSeparatePessimistic() throws Exception { checkNonLocalPuts(false, true, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedNonLocalPessimistic() throws Exception { checkNonLocalPuts(false, false, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testRollbackSeparatePessimistic() throws Exception { checkRollback(true, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedInTxSeparateOptimistic() throws Exception { checkDistributedPut(true, true, OPTIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedInTxOptimistic() throws Exception { checkDistributedPut(true, false, OPTIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedNonLocalInTxSeparateOptimistic() throws Exception { checkNonLocalPuts(true, true, OPTIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testDistributedNonLocalInTxOptimistic() throws Exception { checkNonLocalPuts(true, false, OPTIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testRollbackSeparateOptimistic() throws Exception { checkRollback(true, OPTIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. */ @Test public void testRollback() throws Exception { checkRollback(false, PESSIMISTIC, REPEATABLE_READ); } /** * @throws Exception If failed. 
*/ @Test public void testPutsMultithreadedColocated() throws Exception { checkPutsMultithreaded(true, false, MAX_ITER_CNT); } /** * @throws Exception If failed. */ @Test public void testPutsMultithreadedRemote() throws Exception { checkPutsMultithreaded(false, true, MAX_ITER_CNT); } /** * @throws Exception If failed. */ @Test public void testPutsMultithreadedMixed() throws Exception { checkPutsMultithreaded(true, true, MAX_ITER_CNT); } /** * @param loc Local puts. * @param remote Remote puts. * @param maxIterCnt Number of iterations. * @throws Exception If failed. */ public void checkPutsMultithreaded(boolean loc, boolean remote, final long maxIterCnt) throws Exception { storeEnabled = false; assert loc || remote; startGridsMultiThreaded(3); try { final Ignite g0 = grid(0); Ignite g1 = grid(1); final Collection<Integer> keys = new ConcurrentLinkedQueue<>(); if (loc) { Integer key = -1; for (int i = 0; i < 20; i++) { key = forPrimary(g0, key); keys.add(key); } } if (remote) { Integer key = -1; for (int i = 0; i < 20; i++) { key = forPrimary(g1, key); keys.add(key); } } final AtomicLong iterCnt = new AtomicLong(); final int keysCnt = 10; IgniteInternalFuture<?> fut = multithreadedAsync(new Runnable() { @Override public void run() { // Make thread-local copy to shuffle keys. List<Integer> threadKeys = new ArrayList<>(keys); long threadId = Thread.currentThread().getId(); long itNum; while ((itNum = iterCnt.getAndIncrement()) < maxIterCnt) { Collections.shuffle(threadKeys); List<Integer> iterKeys = threadKeys.subList(0, keysCnt); Collections.sort(iterKeys); Map<Integer, String> vals = U.newLinkedHashMap(keysCnt); for (Integer key : iterKeys) vals.put(key, String.valueOf(key) + threadId); jcache(0).putAll(vals); if (itNum > 0 && itNum % 5000 == 0) info(">>> " + itNum + " iterations completed."); } } }, THREAD_CNT); fut.get(); Thread.sleep(1000); // Check that all transactions are committed. 
for (int i = 0; i < 3; i++) { GridCacheAdapter<Object, Object> cache = ((IgniteKernal)grid(i)).internalCache(DEFAULT_CACHE_NAME); for (Integer key : keys) { GridCacheEntryEx entry = cache.peekEx(key); if (entry != null) { Collection<GridCacheMvccCandidate> locCands = entry.localCandidates(); Collection<GridCacheMvccCandidate> rmtCands = entry.remoteMvccSnapshot(); assert locCands == null || locCands.isEmpty() : "Local candidates is not empty [idx=" + i + ", entry=" + entry + ']'; assert rmtCands == null || rmtCands.isEmpty() : "Remote candidates is not empty [idx=" + i + ", entry=" + entry + ']'; } } } } finally { stopAllGrids(); } } /** * @throws Exception If failed. */ @Test public void testLockLockedLocal() throws Exception { checkLockLocked(true); } /** * @throws Exception If failed. */ @Test public void testLockLockedRemote() throws Exception { checkLockLocked(false); } /** * * @param loc Flag indicating local or remote key should be checked. * @throws Exception If failed. */ private void checkLockLocked(boolean loc) throws Exception { storeEnabled = false; startGridsMultiThreaded(3); try { final Ignite g0 = grid(0); Ignite g1 = grid(1); final Integer key = forPrimary(loc ? 
g0 : g1); final CountDownLatch lockLatch = new CountDownLatch(1); final CountDownLatch unlockLatch = new CountDownLatch(1); final Lock lock = g0.cache(DEFAULT_CACHE_NAME).lock(key); IgniteInternalFuture<?> unlockFut = multithreadedAsync(new Runnable() { @Override public void run() { try { lock.lock(); try { lockLatch.countDown(); U.await(unlockLatch); } finally { lock.unlock(); } } catch (IgniteCheckedException e) { fail("Unexpected exception: " + e); } } }, 1); U.await(lockLatch); assert g0.cache(DEFAULT_CACHE_NAME).isLocalLocked(key, false); assert !g0.cache(DEFAULT_CACHE_NAME).isLocalLocked(key, true) : "Key can not be locked by current thread."; assert !lock.tryLock(); assert g0.cache(DEFAULT_CACHE_NAME).isLocalLocked(key, false); assert !g0.cache(DEFAULT_CACHE_NAME).isLocalLocked(key, true) : "Key can not be locked by current thread."; unlockLatch.countDown(); unlockFut.get(); assert lock.tryLock(); lock.unlock(); } finally { stopAllGrids(); } } /** * @throws Exception If failed. */ @Test public void testPessimisticGet() throws Exception { storeEnabled = false; startGridsMultiThreaded(3); Ignite g0 = grid(0); try { for (int i = 0; i < 100; i++) g0.cache(DEFAULT_CACHE_NAME).put(i, i); for (int i = 0; i < 100; i++) { try (Transaction tx = g0.transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) { Integer val = (Integer)g0.cache(DEFAULT_CACHE_NAME).get(i); assertEquals((Integer)i, val); } } } finally { stopAllGrids(); } } /** * @param explicitTx Whether or not start implicit tx. * @param concurrency Tx concurrency. * @param isolation Tx isolation. * @throws Exception If failed. */ private void checkSinglePut(boolean explicitTx, TransactionConcurrency concurrency, TransactionIsolation isolation) throws Exception { startGrid(); try { Transaction tx = explicitTx ? 
grid().transactions().txStart(concurrency, isolation) : null; try { IgniteCache<Object, Object> cache = jcache(); cache.putAll(F.asMap(1, "Hello", 2, "World")); if (tx != null) tx.commit(); System.out.println(cache.localMetrics()); assertEquals("Hello", cache.get(1)); assertEquals("World", cache.get(2)); assertNull(cache.get(3)); } finally { if (tx != null) tx.close(); } } finally { stopAllGrids(); } } /** * @param concurrency Tx concurrency. * @param isolation Tx isolation. * @throws Exception If failed. */ private void checkReentry(TransactionConcurrency concurrency, TransactionIsolation isolation) throws Exception { startGrid(); try { Transaction tx = grid().transactions().txStart(concurrency, isolation); try { IgniteCache<Object, Object> cache = jcache(); String old = (String)cache.get(1); assert old == null; String replaced = (String)cache.getAndPut(1, "newVal"); assert replaced == null; replaced = (String)cache.getAndPut(1, "newVal2"); assertEquals("newVal", replaced); if (tx != null) tx.commit(); assertEquals("newVal2", cache.get(1)); assertNull(cache.get(3)); } finally { if (tx != null) tx.close(); } } finally { stopAllGrids(); } } /** * @param explicitTx Use explicit transactions. * @param separate Use one-key puts instead of batch. * @param concurrency Transactions concurrency. * @param isolation Transaction isolation. * @throws Exception If failed. */ private void checkDistributedPut(boolean explicitTx, boolean separate, TransactionConcurrency concurrency, TransactionIsolation isolation) throws Exception { storeEnabled = false; startGridsMultiThreaded(3); Ignite g0 = grid(0); Ignite g1 = grid(1); Ignite g2 = grid(2); try { Integer k0 = forPrimary(g0); Integer k1 = forPrimary(g1); Integer k2 = forPrimary(g2); Map<Integer, String> map = F.asMap(k0, "val" + k0, k1, "val" + k1, k2, "val" + k2); Transaction tx = explicitTx ? 
g0.transactions().txStart(concurrency, isolation) : null; try { if (separate) { g0.cache(DEFAULT_CACHE_NAME).put(k0, "val" + k0); g0.cache(DEFAULT_CACHE_NAME).put(k1, "val" + k1); g0.cache(DEFAULT_CACHE_NAME).put(k2, "val" + k2); } else g0.cache(DEFAULT_CACHE_NAME).putAll(map); if (tx != null) tx.commit(); } finally { if (tx != null) tx.close(); } if (separate) { assertEquals("val" + k0, g0.cache(DEFAULT_CACHE_NAME).get(k0)); assertEquals("val" + k1, g0.cache(DEFAULT_CACHE_NAME).get(k1)); assertEquals("val" + k2, g0.cache(DEFAULT_CACHE_NAME).get(k2)); } else { Map<Object, Object> res = g0.cache(DEFAULT_CACHE_NAME).getAll(map.keySet()); assertEquals(map, res); } tx = explicitTx ? g0.transactions().txStart(concurrency, isolation) : null; try { if (separate) { g0.cache(DEFAULT_CACHE_NAME).remove(k0); g0.cache(DEFAULT_CACHE_NAME).remove(k1); g0.cache(DEFAULT_CACHE_NAME).remove(k2); } else g0.cache(DEFAULT_CACHE_NAME).removeAll(map.keySet()); if (tx != null) tx.commit(); } finally { if (tx != null) tx.close(); } if (separate) { assertEquals(null, g0.cache(DEFAULT_CACHE_NAME).get(k0)); assertEquals(null, g0.cache(DEFAULT_CACHE_NAME).get(k1)); assertEquals(null, g0.cache(DEFAULT_CACHE_NAME).get(k2)); } else { Map<Object, Object> res = g0.cache(DEFAULT_CACHE_NAME).getAll(map.keySet()); assertTrue(res.isEmpty()); } } finally { stopAllGrids(); } } /** * @param explicitTx Use explicit transactions. * @param separate Use one-key puts instead of batch. * @param concurrency Transactions concurrency. * @param isolation Transaction isolation. * @throws Exception If failed. 
*/ private void checkNonLocalPuts(boolean explicitTx, boolean separate, TransactionConcurrency concurrency, TransactionIsolation isolation) throws Exception { storeEnabled = false; startGridsMultiThreaded(3); Ignite g0 = grid(0); Ignite g1 = grid(1); Ignite g2 = grid(2); try { Integer k1 = forPrimary(g1); Integer k2 = forPrimary(g2); Map<Integer, String> map = F.asMap(k1, "val" + k1, k2, "val" + k2); Transaction tx = explicitTx ? g0.transactions().txStart(concurrency, isolation) : null; try { if (separate) { g0.cache(DEFAULT_CACHE_NAME).put(k1, "val" + k1); g0.cache(DEFAULT_CACHE_NAME).put(k2, "val" + k2); } else g0.cache(DEFAULT_CACHE_NAME).putAll(map); if (tx != null) tx.commit(); } finally { if (tx != null) tx.close(); } if (separate) { assertEquals("val" + k1, g0.cache(DEFAULT_CACHE_NAME).get(k1)); assertEquals("val" + k2, g0.cache(DEFAULT_CACHE_NAME).get(k2)); } else { Map<Object, Object> res = g0.cache(DEFAULT_CACHE_NAME).getAll(map.keySet()); assertEquals(map, res); } tx = explicitTx ? g0.transactions().txStart(concurrency, isolation) : null; try { if (separate) { g0.cache(DEFAULT_CACHE_NAME).remove(k1); g0.cache(DEFAULT_CACHE_NAME).remove(k2); } else g0.cache(DEFAULT_CACHE_NAME).removeAll(map.keySet()); if (tx != null) tx.commit(); } finally { if (tx != null) tx.close(); } if (separate) { assertEquals(null, g0.cache(DEFAULT_CACHE_NAME).get(k1)); assertEquals(null, g0.cache(DEFAULT_CACHE_NAME).get(k2)); } else { Map<Object, Object> res = g0.cache(DEFAULT_CACHE_NAME).getAll(map.keySet()); assertTrue(res.isEmpty()); } } finally { stopAllGrids(); } } /** * @throws Exception If failed. */ @Test public void testWriteThrough() throws Exception { storeEnabled = true; startGridsMultiThreaded(3); Ignite g0 = grid(0); Ignite g1 = grid(1); Ignite g2 = grid(2); try { // Check local commit. 
int k0 = forPrimary(g0); int k1 = forPrimary(g0, k0); int k2 = forPrimary(g0, k1); checkStoreWithValues(F.asMap(k0, String.valueOf(k0), k1, String.valueOf(k1), k2, String.valueOf(k2))); // Reassign keys. k1 = forPrimary(g1); k2 = forPrimary(g2); checkStoreWithValues(F.asMap(k0, String.valueOf(k0), k1, String.valueOf(k1), k2, String.valueOf(k2))); // Check remote only. checkStoreWithValues(F.asMap(k1, String.valueOf(k1), k2, String.valueOf(k2))); } finally { stopAllGrids(); } } /** * @param map Values to check. * @throws Exception If failed. */ private void checkStoreWithValues(Map<Integer, String> map) throws Exception { Ignite g0 = grid(0); Ignite g1 = grid(1); Ignite g2 = grid(2); g0.cache(DEFAULT_CACHE_NAME).putAll(map); checkStore(g0, map); checkStore(g1, Collections.<Integer, String>emptyMap()); checkStore(g2, Collections.<Integer, String>emptyMap()); clearStores(3); try (Transaction tx = g0.transactions().txStart(OPTIMISTIC, READ_COMMITTED)) { g0.cache(DEFAULT_CACHE_NAME).putAll(map); tx.commit(); checkStore(g0, map); checkStore(g1, Collections.<Integer, String>emptyMap()); checkStore(g2, Collections.<Integer, String>emptyMap()); clearStores(3); } } /** * @param ignite Grid to take store from. * @param map Expected values in store. * @throws Exception If failed. */ private void checkStore(Ignite ignite, Map<Integer, String> map) throws Exception { String cacheName = ignite.configuration().getCacheConfiguration()[1].getName(); GridCacheContext ctx = ((IgniteKernal)ignite).context().cache().internalCache(cacheName).context(); CacheStore store = ctx.store().configuredStore(); assertEquals(map, ((GridCacheTestStore)store).getMap()); } /** * Clears all stores. * * @param cnt Grid count. 
*/ private void clearStores(int cnt) { for (int i = 0; i < cnt; i++) { IgniteEx grid = grid(i); String cacheName = grid.configuration().getCacheConfiguration()[1].getName(); GridCacheContext ctx = grid.context().cache().internalCache(cacheName).context(); CacheStore store = ctx.store().configuredStore(); ((GridCacheTestStore)store).reset(); } } /** * @param separate Use one-key puts instead of batch. * @param concurrency Transactions concurrency. * @param isolation Transaction isolation. * @throws Exception If failed. */ private void checkRollback(boolean separate, TransactionConcurrency concurrency, TransactionIsolation isolation) throws Exception { storeEnabled = false; startGridsMultiThreaded(3); Ignite g0 = grid(0); Ignite g1 = grid(1); Ignite g2 = grid(2); try { Integer k0 = forPrimary(g0); Integer k1 = forPrimary(g1); Integer k2 = forPrimary(g2); Map<Integer, String> map0 = F.asMap(k0, "val" + k0, k1, "val" + k1, k2, "val" + k2); g0.cache(DEFAULT_CACHE_NAME).putAll(map0); Map<Integer, String> map = F.asMap(k0, "value" + k0, k1, "value" + k1, k2, "value" + k2); Transaction tx = g0.transactions().txStart(concurrency, isolation); try { if (separate) { g0.cache(DEFAULT_CACHE_NAME).put(k0, "value" + k0); g0.cache(DEFAULT_CACHE_NAME).put(k1, "value" + k1); g0.cache(DEFAULT_CACHE_NAME).put(k2, "value" + k2); } else g0.cache(DEFAULT_CACHE_NAME).putAll(map); tx.rollback(); } finally { tx.close(); } if (separate) { assertEquals("val" + k0, g0.cache(DEFAULT_CACHE_NAME).get(k0)); assertEquals("val" + k1, g0.cache(DEFAULT_CACHE_NAME).get(k1)); assertEquals("val" + k2, g0.cache(DEFAULT_CACHE_NAME).get(k2)); } else { Map<Object, Object> res = g0.cache(DEFAULT_CACHE_NAME).getAll(map.keySet()); assertEquals(map0, res); } tx = g0.transactions().txStart(concurrency, isolation); try { if (separate) { g0.cache(DEFAULT_CACHE_NAME).remove(k0); g0.cache(DEFAULT_CACHE_NAME).remove(k1); g0.cache(DEFAULT_CACHE_NAME).remove(k2); } else 
g0.cache(DEFAULT_CACHE_NAME).removeAll(map.keySet()); tx.rollback(); } finally { tx.close(); } if (separate) { assertEquals("val" + k0, g0.cache(DEFAULT_CACHE_NAME).get(k0)); assertEquals("val" + k1, g0.cache(DEFAULT_CACHE_NAME).get(k1)); assertEquals("val" + k2, g0.cache(DEFAULT_CACHE_NAME).get(k2)); } else { Map<Object, Object> res = g0.cache(DEFAULT_CACHE_NAME).getAll(map.keySet()); assertEquals(map0, res); } } finally { stopAllGrids(); } } /** * @throws Exception If failed. */ @Test public void testExplicitLocks() throws Exception { storeEnabled = false; startGrid(); try { IgniteCache<Object, Object> cache = jcache(); Lock lock = cache.lock(1); lock.lock(); assertNull(cache.getAndPut(1, "key1")); assertEquals("key1", cache.getAndPut(1, "key2")); assertEquals("key2", cache.get(1)); lock.unlock(); } finally { stopAllGrids(); } } /** * @throws Exception If failed. */ @Test public void testExplicitLocksDistributed() throws Exception { storeEnabled = false; startGridsMultiThreaded(3); Ignite g0 = grid(0); Ignite g1 = grid(1); Ignite g2 = grid(2); try { Integer k0 = forPrimary(g0); Integer k1 = forPrimary(g1); Integer k2 = forPrimary(g2); IgniteCache<Object, Object> cache = jcache(0); Lock lock0 = cache.lock(k0); Lock lock1 = cache.lock(k1); Lock lock2 = cache.lock(k2); lock0.lock(); lock1.lock(); lock2.lock(); cache.put(k0, "val0"); cache.putAll(F.asMap(k1, "val1", k2, "val2")); assertEquals("val0", cache.get(k0)); assertEquals("val1", cache.get(k1)); assertEquals("val2", cache.get(k2)); lock0.unlock(); lock1.unlock(); lock2.unlock(); } finally { stopAllGrids(); } } /** * Version of check thread chain case for optimistic transactions. * * @throws Exception If failed. */ @Test @WithSystemProperty(key = IGNITE_TO_STRING_MAX_LENGTH, value = "100000") public void testConcurrentCheckThreadChainOptimistic() throws Exception { testConcurrentCheckThreadChain(OPTIMISTIC); } /** * Version of check thread chain case for pessimistic transactions. 
* * @throws Exception If failed. */ @Test @WithSystemProperty(key = IGNITE_TO_STRING_MAX_LENGTH, value = "100000") public void testConcurrentCheckThreadChainPessimistic() throws Exception { testConcurrentCheckThreadChain(PESSIMISTIC); } /** * Covers scenario when thread chain locks acquisition for XID 1 should be continued during unsuccessful attempt * to acquire lock on certain key for XID 2 (XID 1 with uncompleted chain becomes owner of this key instead). * * @throws Exception If failed. */ protected void testConcurrentCheckThreadChain(TransactionConcurrency txConcurrency) throws Exception { storeEnabled = false; startGrid(0); try { final AtomicLong iterCnt = new AtomicLong(); int commonKey = 1000; int otherKeyPickVariance = 10; int otherKeysCnt = 5; int maxIterCnt = MAX_ITER_CNT * 10; IgniteInternalFuture<?> fut = multithreadedAsync(new Runnable() { @Override public void run() { long threadId = Thread.currentThread().getId(); long itNum; while ((itNum = iterCnt.getAndIncrement()) < maxIterCnt) { Map<Integer, String> vals = U.newLinkedHashMap(otherKeysCnt * 2 + 1); for (int i = 0; i < otherKeysCnt; i++) { int key = ThreadLocalRandom.current().nextInt( otherKeyPickVariance * i, otherKeyPickVariance * (i + 1)); vals.put(key, String.valueOf(key) + threadId); } vals.put(commonKey, String.valueOf(commonKey) + threadId); for (int i = 0; i < otherKeysCnt; i++) { int key = ThreadLocalRandom.current().nextInt( commonKey + otherKeyPickVariance * (i + 1), otherKeyPickVariance * (i + 2) + commonKey); vals.put(key, String.valueOf(key) + threadId); } try (Transaction tx = grid(0).transactions().txStart(txConcurrency, READ_COMMITTED)) { jcache(0).putAll(vals); tx.commit(); } if (itNum > 0 && itNum % 5000 == 0) info(">>> " + itNum + " iterations completed."); } } }, THREAD_CNT); while (true) { long prevIterCnt = iterCnt.get(); try { fut.get(5_000); break; } catch (IgniteFutureTimeoutCheckedException ignored) { if (iterCnt.get() == prevIterCnt) { Collection<IgniteInternalTx> 
hangingTxes = ignite(0).context().cache().context().tm().activeTransactions(); fail(hangingTxes.toString()); } } } } finally { stopAllGrids(); } } /** * Gets key for which given node is primary. * * @param g Grid. * @return Key. */ private static Integer forPrimary(Ignite g) { return forPrimary(g, -1); } /** * Gets next key for which given node is primary, starting with (prev + 1) * * @param g Grid. * @param prev Previous key. * @return Key. */ private static Integer forPrimary(Ignite g, int prev) { for (int i = prev + 1; i < 10000; i++) { if (g.affinity(DEFAULT_CACHE_NAME).mapKeyToNode(i).id().equals(g.cluster().localNode().id())) return i; } throw new IllegalArgumentException("Can not find key being primary for node: " + g.cluster().localNode().id()); } }
googleapis/google-cloud-java
35,058
java-billing/proto-google-cloud-billing-v1/src/main/java/com/google/cloud/billing/v1/ListSkusResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/billing/v1/cloud_catalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.billing.v1; /** * * * <pre> * Response message for `ListSkus`. * </pre> * * Protobuf type {@code google.cloud.billing.v1.ListSkusResponse} */ public final class ListSkusResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.billing.v1.ListSkusResponse) ListSkusResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListSkusResponse.newBuilder() to construct. 
private ListSkusResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSkusResponse() { skus_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSkusResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListSkusResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListSkusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.billing.v1.ListSkusResponse.class, com.google.cloud.billing.v1.ListSkusResponse.Builder.class); } public static final int SKUS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.billing.v1.Sku> skus_; /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.billing.v1.Sku> getSkusList() { return skus_; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.billing.v1.SkuOrBuilder> getSkusOrBuilderList() { return skus_; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ @java.lang.Override public int getSkusCount() { return skus_.size(); } /** * * * <pre> * The list of public SKUs of the given service. 
* </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ @java.lang.Override public com.google.cloud.billing.v1.Sku getSkus(int index) { return skus_.get(index); } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ @java.lang.Override public com.google.cloud.billing.v1.SkuOrBuilder getSkusOrBuilder(int index) { return skus_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. To retrieve the next page, * call `ListSkus` again with the `page_token` field set to this * value. This field is empty if there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. To retrieve the next page, * call `ListSkus` again with the `page_token` field set to this * value. This field is empty if there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < skus_.size(); i++) { output.writeMessage(1, skus_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < skus_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, skus_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.billing.v1.ListSkusResponse)) { return super.equals(obj); } com.google.cloud.billing.v1.ListSkusResponse other = (com.google.cloud.billing.v1.ListSkusResponse) obj; if (!getSkusList().equals(other.getSkusList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSkusCount() > 0) { hash = (37 * hash) + SKUS_FIELD_NUMBER; hash = (53 * hash) + getSkusList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.v1.ListSkusResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.billing.v1.ListSkusResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.billing.v1.ListSkusResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.billing.v1.ListSkusResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.billing.v1.ListSkusResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for `ListSkus`. * </pre> * * Protobuf type {@code google.cloud.billing.v1.ListSkusResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.billing.v1.ListSkusResponse) com.google.cloud.billing.v1.ListSkusResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListSkusResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListSkusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.billing.v1.ListSkusResponse.class, com.google.cloud.billing.v1.ListSkusResponse.Builder.class); } // Construct using com.google.cloud.billing.v1.ListSkusResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (skusBuilder_ == null) { skus_ = java.util.Collections.emptyList(); } else { skus_ = null; skusBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListSkusResponse_descriptor; } @java.lang.Override public com.google.cloud.billing.v1.ListSkusResponse 
getDefaultInstanceForType() { return com.google.cloud.billing.v1.ListSkusResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.billing.v1.ListSkusResponse build() { com.google.cloud.billing.v1.ListSkusResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.billing.v1.ListSkusResponse buildPartial() { com.google.cloud.billing.v1.ListSkusResponse result = new com.google.cloud.billing.v1.ListSkusResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloud.billing.v1.ListSkusResponse result) { if (skusBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { skus_ = java.util.Collections.unmodifiableList(skus_); bitField0_ = (bitField0_ & ~0x00000001); } result.skus_ = skus_; } else { result.skus_ = skusBuilder_.build(); } } private void buildPartial0(com.google.cloud.billing.v1.ListSkusResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.billing.v1.ListSkusResponse) { return mergeFrom((com.google.cloud.billing.v1.ListSkusResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.billing.v1.ListSkusResponse other) { if (other == com.google.cloud.billing.v1.ListSkusResponse.getDefaultInstance()) return this; if (skusBuilder_ == null) { if (!other.skus_.isEmpty()) { if (skus_.isEmpty()) { skus_ = other.skus_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSkusIsMutable(); skus_.addAll(other.skus_); } onChanged(); } } else { if (!other.skus_.isEmpty()) { if (skusBuilder_.isEmpty()) { skusBuilder_.dispose(); skusBuilder_ = null; skus_ = other.skus_; bitField0_ = (bitField0_ & ~0x00000001); skusBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getSkusFieldBuilder() : null; } else { skusBuilder_.addAllMessages(other.skus_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.billing.v1.Sku m = input.readMessage(com.google.cloud.billing.v1.Sku.parser(), extensionRegistry); if (skusBuilder_ == null) { ensureSkusIsMutable(); skus_.add(m); } else { skusBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.billing.v1.Sku> skus_ = java.util.Collections.emptyList(); private void ensureSkusIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { skus_ = new java.util.ArrayList<com.google.cloud.billing.v1.Sku>(skus_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.billing.v1.Sku, com.google.cloud.billing.v1.Sku.Builder, com.google.cloud.billing.v1.SkuOrBuilder> skusBuilder_; /** * * * <pre> * The list of public SKUs of the given service. 
* </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public java.util.List<com.google.cloud.billing.v1.Sku> getSkusList() { if (skusBuilder_ == null) { return java.util.Collections.unmodifiableList(skus_); } else { return skusBuilder_.getMessageList(); } } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public int getSkusCount() { if (skusBuilder_ == null) { return skus_.size(); } else { return skusBuilder_.getCount(); } } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public com.google.cloud.billing.v1.Sku getSkus(int index) { if (skusBuilder_ == null) { return skus_.get(index); } else { return skusBuilder_.getMessage(index); } } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder setSkus(int index, com.google.cloud.billing.v1.Sku value) { if (skusBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSkusIsMutable(); skus_.set(index, value); onChanged(); } else { skusBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder setSkus(int index, com.google.cloud.billing.v1.Sku.Builder builderForValue) { if (skusBuilder_ == null) { ensureSkusIsMutable(); skus_.set(index, builderForValue.build()); onChanged(); } else { skusBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of public SKUs of the given service. 
* </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder addSkus(com.google.cloud.billing.v1.Sku value) { if (skusBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSkusIsMutable(); skus_.add(value); onChanged(); } else { skusBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder addSkus(int index, com.google.cloud.billing.v1.Sku value) { if (skusBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSkusIsMutable(); skus_.add(index, value); onChanged(); } else { skusBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder addSkus(com.google.cloud.billing.v1.Sku.Builder builderForValue) { if (skusBuilder_ == null) { ensureSkusIsMutable(); skus_.add(builderForValue.build()); onChanged(); } else { skusBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder addSkus(int index, com.google.cloud.billing.v1.Sku.Builder builderForValue) { if (skusBuilder_ == null) { ensureSkusIsMutable(); skus_.add(index, builderForValue.build()); onChanged(); } else { skusBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder addAllSkus( java.lang.Iterable<? 
extends com.google.cloud.billing.v1.Sku> values) { if (skusBuilder_ == null) { ensureSkusIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, skus_); onChanged(); } else { skusBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder clearSkus() { if (skusBuilder_ == null) { skus_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { skusBuilder_.clear(); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public Builder removeSkus(int index) { if (skusBuilder_ == null) { ensureSkusIsMutable(); skus_.remove(index); onChanged(); } else { skusBuilder_.remove(index); } return this; } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public com.google.cloud.billing.v1.Sku.Builder getSkusBuilder(int index) { return getSkusFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public com.google.cloud.billing.v1.SkuOrBuilder getSkusOrBuilder(int index) { if (skusBuilder_ == null) { return skus_.get(index); } else { return skusBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public java.util.List<? extends com.google.cloud.billing.v1.SkuOrBuilder> getSkusOrBuilderList() { if (skusBuilder_ != null) { return skusBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(skus_); } } /** * * * <pre> * The list of public SKUs of the given service. 
* </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public com.google.cloud.billing.v1.Sku.Builder addSkusBuilder() { return getSkusFieldBuilder().addBuilder(com.google.cloud.billing.v1.Sku.getDefaultInstance()); } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public com.google.cloud.billing.v1.Sku.Builder addSkusBuilder(int index) { return getSkusFieldBuilder() .addBuilder(index, com.google.cloud.billing.v1.Sku.getDefaultInstance()); } /** * * * <pre> * The list of public SKUs of the given service. * </pre> * * <code>repeated .google.cloud.billing.v1.Sku skus = 1;</code> */ public java.util.List<com.google.cloud.billing.v1.Sku.Builder> getSkusBuilderList() { return getSkusFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.billing.v1.Sku, com.google.cloud.billing.v1.Sku.Builder, com.google.cloud.billing.v1.SkuOrBuilder> getSkusFieldBuilder() { if (skusBuilder_ == null) { skusBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.billing.v1.Sku, com.google.cloud.billing.v1.Sku.Builder, com.google.cloud.billing.v1.SkuOrBuilder>( skus_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); skus_ = null; } return skusBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. To retrieve the next page, * call `ListSkus` again with the `page_token` field set to this * value. This field is empty if there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. To retrieve the next page, * call `ListSkus` again with the `page_token` field set to this * value. This field is empty if there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. To retrieve the next page, * call `ListSkus` again with the `page_token` field set to this * value. This field is empty if there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. To retrieve the next page, * call `ListSkus` again with the `page_token` field set to this * value. This field is empty if there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. To retrieve the next page, * call `ListSkus` again with the `page_token` field set to this * value. This field is empty if there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.billing.v1.ListSkusResponse) } // @@protoc_insertion_point(class_scope:google.cloud.billing.v1.ListSkusResponse) private static final com.google.cloud.billing.v1.ListSkusResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.billing.v1.ListSkusResponse(); } public static com.google.cloud.billing.v1.ListSkusResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSkusResponse> PARSER = new com.google.protobuf.AbstractParser<ListSkusResponse>() { @java.lang.Override public ListSkusResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListSkusResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSkusResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.billing.v1.ListSkusResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/jclouds
35,054
blobstore/src/test/java/org/jclouds/blobstore/integration/internal/BaseContainerIntegrationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.blobstore.integration.internal; import static com.google.common.base.Charsets.UTF_8; import static com.google.common.base.Throwables.propagateIfPossible; import static com.google.common.collect.Iterables.get; import static com.google.common.hash.Hashing.md5; import static org.assertj.core.api.Assertions.assertThat; import static org.jclouds.blobstore.options.ListContainerOptions.Builder.afterMarker; import static org.jclouds.blobstore.options.ListContainerOptions.Builder.inDirectory; import static org.jclouds.blobstore.options.ListContainerOptions.Builder.maxResults; import static org.jclouds.utils.TestUtils.NO_INVOCATIONS; import static org.jclouds.utils.TestUtils.SINGLE_NO_ARG_INVOCATION; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Iterator; import java.util.Random; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import jakarta.ws.rs.core.MediaType; import org.jclouds.blobstore.BlobStore; 
import org.jclouds.blobstore.attr.ConsistencyModel;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.domain.ContainerAccess;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.ListContainerOptions;
import org.jclouds.http.HttpRequest;
import org.jclouds.http.HttpResponse;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.ByteSource;
import com.google.common.util.concurrent.Uninterruptibles;

/**
 * Shared integration tests for container-level BlobStore operations (create,
 * list with markers/prefixes/delimiters, clear, delete). Helpers such as
 * getContainerName(), returnContainer(), awaitConsistency() and the
 * add*ToContainer fixtures come from BaseBlobStoreIntegrationTest.
 */
public class BaseContainerIntegrationTest extends BaseBlobStoreIntegrationTest {

   /** A container name that was never created must not report as existing. */
   @Test(groups = { "integration", "live" })
   public void containerDoesntExist() {
      Random random = new Random();
      assert !view.getBlobStore().containerExists("forgetaboutit" + random.nextInt(Integer.MAX_VALUE));
      assert !view.getBlobStore().containerExists(
            "cloudcachestorefunctionalintegrationtest-first" + random.nextInt(Integer.MAX_VALUE));
   }

   /**
    * Creating a container that already exists (and already holds a blob) must
    * be a no-op: after the second create call the blob count is still 1.
    */
   @Test(groups = { "integration", "live" })
   // TODO: the test name does not describe its behavior
   public void testPutTwiceIsOkAndDoesntOverwrite() throws InterruptedException {
      String containerName = getContainerName();
      try {
         view.getBlobStore().createContainerInLocation(null, containerName);

         Blob blob = view.getBlobStore().blobBuilder("hello").payload(TEST_STRING).build();
         view.getBlobStore().putBlob(containerName, blob);

         // second create of the same container must not wipe its contents
         view.getBlobStore().createContainerInLocation(null, containerName);
         awaitConsistency();
         assertEquals(view.getBlobStore().countBlobs(containerName), 1);
      } finally {
         returnContainer(containerName);
      }
   }

   /** Listing after a marker equal to the last (only) key yields an empty page. */
   @Test
   public void testListMarkerAfterLastKey() throws Exception {
      String key = "hello";
      String containerName = getContainerName();
      try {
         addBlobToContainer(containerName,
               // NOTE all metadata in jclouds comes out as lowercase, in an effort to
               // normalize the providers.
               view.getBlobStore().blobBuilder(key).userMetadata(ImmutableMap.of("Adrian", "powderpuff"))
                     .payload(TEST_STRING).contentType(MediaType.TEXT_PLAIN)
                     .contentMD5(md5().hashString(TEST_STRING, UTF_8).asBytes())
                     .build());
         validateContent(containerName, key);
         awaitConsistency();

         PageSet<? extends StorageMetadata> container = view.getBlobStore().list(containerName, afterMarker(key));
         assertThat(container).isEmpty();
      } finally {
         returnContainer(containerName);
      }
   }

   /** maxResults(0) must return an empty page even when the container has a blob. */
   @Test
   public void testListContainerWithZeroMaxResults() throws Exception {
      String key = "hello";
      String containerName = getContainerName();
      try {
         addBlobToContainer(containerName,
               // NOTE all metadata in jclouds comes out as lowercase, in an effort to
               // normalize the providers.
               view.getBlobStore().blobBuilder(key).userMetadata(ImmutableMap.of("Adrian", "powderpuff"))
                     .payload(TEST_STRING).contentType(MediaType.TEXT_PLAIN)
                     .contentMD5(md5().hashString(TEST_STRING, UTF_8).asBytes())
                     .build());
         awaitConsistency();
         validateContent(containerName, key);

         PageSet<? extends StorageMetadata> container = view.getBlobStore().list(containerName, maxResults(0));
         assertThat(container).isEmpty();
      } finally {
         returnContainer(containerName);
      }
   }

   /**
    * withDetails() listing must surface full blob metadata: content type,
    * length, (lowercased) user metadata, and content MD5.
    */
   @Test(groups = { "integration", "live" })
   public void testWithDetails() throws InterruptedException, IOException {
      String key = "hello";
      String containerName = getContainerName();
      try {
         addBlobToContainer(containerName,
               // NOTE all metadata in jclouds comes out as lowercase, in an effort to
               // normalize the providers.
               view.getBlobStore().blobBuilder(key).userMetadata(ImmutableMap.of("Adrian", "powderpuff"))
                     .payload(TEST_STRING).contentType(MediaType.TEXT_PLAIN)
                     .contentMD5(md5().hashString(TEST_STRING, UTF_8).asBytes())
                     .build());
         awaitConsistency();
         validateContent(containerName, key);
         PageSet<?
extends StorageMetadata> container = view.getBlobStore().list(containerName,
               maxResults(1).withDetails());

         BlobMetadata metadata = BlobMetadata.class.cast(get(container, 0));

         assert metadata.getContentMetadata().getContentType().startsWith("text/plain") : metadata.getContentMetadata()
               .getContentType();
         assertEquals(metadata.getContentMetadata().getContentLength(), Long.valueOf(TEST_STRING.length()));
         // user-metadata keys are normalized to lowercase by jclouds
         assertEquals(metadata.getUserMetadata().get("adrian"), "powderpuff");
         checkMD5(metadata);
      } finally {
         returnContainer(containerName);
      }
   }

   /** Asserts the stored content MD5 equals the MD5 of TEST_STRING. */
   protected void checkMD5(BlobMetadata metadata) throws IOException {
      assertEquals(metadata.getContentMetadata().getContentMD5(), md5().hashString(TEST_STRING, UTF_8).asBytes());
   }

   /** clearContainer must also remove blobs that live under a path prefix. */
   @Test(groups = { "integration", "live" })
   public void testClearWhenContentsUnderPath() throws InterruptedException {
      String containerName = getContainerName();
      try {
         add5BlobsUnderPathAnd5UnderRootToContainer(containerName);
         view.getBlobStore().clearContainer(containerName);
         assertConsistencyAwareContainerSize(containerName, 0);
      } finally {
         returnContainer(containerName);
      }
   }

   /**
    * Exercises clearContainer with prefix/recursive options against the
    * nested layout created by add5NestedBlobsToContainer (per the assertions
    * below: path/1/a, path/1/2/b, path/1/2/3/c, path/1/2/3/4/d,
    * path/1/2/3/5/e).
    */
   @Test(groups = { "integration", "live" })
   public void testClearWithOptions() throws InterruptedException {
      String containerName = getContainerName();
      try {
         ListContainerOptions options;

         // Should wipe out all objects, as there are empty folders above
         add5NestedBlobsToContainer(containerName);
         options = new ListContainerOptions();
         options.prefix("path/1/");
         options.recursive();
         view.getBlobStore().clearContainer(containerName, options);
         assertConsistencyAwareContainerSize(containerName, 0);

         // recursive prefix that matches a subtree: only that subtree is cleared
         view.getBlobStore().clearContainer(containerName);
         add5NestedBlobsToContainer(containerName);
         options = new ListContainerOptions();
         options.prefix("path/1/2/3");
         options.recursive();
         view.getBlobStore().clearContainer(containerName, options);
         assertConsistencyAwareBlobExists(containerName, "path/1/a");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/b");
         assertConsistencyAwareBlobDoesntExist(containerName, "path/1/2/3");

         view.getBlobStore().clearContainer(containerName);
         add5NestedBlobsToContainer(containerName);
         options = new ListContainerOptions();
         options.prefix("path/1/2/3/4/");
         options.recursive();
         view.getBlobStore().clearContainer(containerName, options);
         assertConsistencyAwareBlobExists(containerName, "path/1/a");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/b");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/3/5/e");
         assertConsistencyAwareBlobDoesntExist(containerName, "path/1/2/3/4");

         // non-recursive, should not clear anything, as prefix does not match
         view.getBlobStore().clearContainer(containerName);
         add5NestedBlobsToContainer(containerName);
         options = new ListContainerOptions();
         options.prefix("path/1/2/3");
         view.getBlobStore().clearContainer(containerName, options);
         assertConsistencyAwareBlobExists(containerName, "path/1/a");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/b");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/3/c");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/3/5/e");

         // non-recursive, should only clear path/1/2/3/c
         view.getBlobStore().clearContainer(containerName);
         add5NestedBlobsToContainer(containerName);
         options = new ListContainerOptions();
         options.prefix("path/1/2/3/");
         view.getBlobStore().clearContainer(containerName, options);
         assertConsistencyAwareBlobExists(containerName, "path/1/a");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/b");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/3/4/d");
         assertConsistencyAwareBlobDoesntExist(containerName, "path/1/2/3/c");

         // non-recursive, should only clear path/1/2/3/c
         view.getBlobStore().clearContainer(containerName);
         add5NestedBlobsToContainer(containerName);
         options = new ListContainerOptions();
         options.prefix("path/1/2/3/c");
         view.getBlobStore().clearContainer(containerName, options);
         assertConsistencyAwareBlobExists(containerName, "path/1/a");
assertConsistencyAwareBlobExists(containerName, "path/1/2/b");
         assertConsistencyAwareBlobExists(containerName, "path/1/2/3/4/d");
         assertConsistencyAwareBlobDoesntExist(containerName, "path/1/2/3/c");
      } finally {
         returnContainer(containerName);
      }
   }

   /**
    * Paging with maxResults(1) yields a marker; resuming from that marker
    * returns the remaining 25 of the 26 alphabet blobs with no further marker.
    */
   @Test(groups = { "integration", "live" })
   public void testListContainerMarker() throws InterruptedException {
      String containerName = getContainerName();
      try {
         addAlphabetUnderRoot(containerName);
         PageSet<? extends StorageMetadata> container = view.getBlobStore().list(containerName, maxResults(1));

         assert container.getNextMarker() != null;
         assertEquals(container.size(), 1);
         String marker = container.getNextMarker();

         container = view.getBlobStore().list(containerName, afterMarker(marker));
         assertEquals(container.getNextMarker(), null);
         assert container.size() == 25 : String.format("size should have been 25, but was %d: %s", container.size(),
               container);
         assert container.getNextMarker() == null;
      } finally {
         returnContainer(containerName);
      }
   }

   /**
    * A non-recursive root listing collapses the ten blobs under a common
    * prefix into one entry: 15 root blobs + 1 prefix entry = 16 results.
    */
   @Test(groups = { "integration", "live" })
   public void testListRootUsesDelimiter() throws InterruptedException {
      String containerName = getContainerName();
      try {
         String prefix = "rootdelimiter";
         addTenObjectsUnderPrefix(containerName, prefix);
         add15UnderRoot(containerName);
         awaitConsistency();
         PageSet<? extends StorageMetadata> container = view.getBlobStore().list(containerName);
         assert container.getNextMarker() == null;
         assertEquals(container.size(), 16);
      } finally {
         returnContainer(containerName);
      }
   }

   /**
    * Exercises directory semantics end to end: create, list (recursive and
    * not), clear, and delete — including a two-level-deep nested directory.
    */
   @Test(groups = { "integration", "live" })
   public void testDirectory() throws InterruptedException {
      String containerName = getContainerName();
      try {
         String directory = "directory";
         assert !view.getBlobStore().directoryExists(containerName, directory);

         view.getBlobStore().createDirectory(containerName, directory);

         assert view.getBlobStore().directoryExists(containerName, directory);
         PageSet<? extends StorageMetadata> container = view.getBlobStore().list(containerName);
         // we should have only the directory under root
         assert container.getNextMarker() == null;
         assert container.size() == 1 : container;

         container = view.getBlobStore().list(containerName, inDirectory(directory));
         // we should have nothing in the directory
         assert container.getNextMarker() == null;
         assert container.size() == 0 : container;

         addTenObjectsUnderPrefix(containerName, directory);
         awaitConsistency();

         container = view.getBlobStore().list(containerName);
         // we should get back the subdir entry and the directory marker
         assert container.getNextMarker() == null;
         assertThat(container).hasSize(2);

         container = view.getBlobStore().list(containerName, inDirectory(directory));
         // we should have only the 10 items under the directory
         assert container.getNextMarker() == null;
         assert container.size() == 10 : container;

         // try 2 level deep directory
         assert !view.getBlobStore().directoryExists(containerName, directory + "/" + directory);
         view.getBlobStore().createDirectory(containerName, directory + "/" + directory);
         awaitConsistency();
         assert view.getBlobStore().directoryExists(containerName, directory + "/" + directory);

         view.getBlobStore().clearContainer(containerName, inDirectory(directory));
         awaitConsistency();
         // clearing the parent removes the nested directory but not the parent itself
         assert view.getBlobStore().directoryExists(containerName, directory);
         assertThat(view.getBlobStore().directoryExists(containerName, directory + "/" + directory)).isFalse();

         // should have only the 2 level-deep directory above
         container = view.getBlobStore().list(containerName, inDirectory(directory));
         assert container.getNextMarker() == null;
         assertThat(container).hasSize(0);

         view.getBlobStore().createDirectory(containerName, directory + "/" + directory);
         awaitConsistency();

         container = view.getBlobStore().list(containerName, inDirectory(directory).recursive());
         assert container.getNextMarker() == null;
         assert container.size() == 1 : container;

         view.getBlobStore().clearContainer(containerName, inDirectory(directory).recursive());

         // should no longer have the 2 level-deep directory above
         container = view.getBlobStore().list(containerName, inDirectory(directory));
         assert container.getNextMarker() == null;
         assert container.size() == 0 : container;

         container = view.getBlobStore().list(containerName);
         // should only have the directory
         assert container.getNextMarker() == null;
         assert container.size() == 1 : container;

         view.getBlobStore().deleteDirectory(containerName, directory);

         container = view.getBlobStore().list(containerName);
         // now should be completely empty
         assert container.getNextMarker() == null;
         assert container.size() == 0 : container;
      } finally {
         returnContainer(containerName);
      }
   }

   /** prefix + delimiter listing returns only the ten blobs under the prefix. */
   @Test(groups = { "integration", "live" })
   public void testListContainerPrefix() throws InterruptedException {
      String containerName = getContainerName();
      try {
         String prefix = "containerprefix";
         addTenObjectsUnderPrefix(containerName, prefix);
         add15UnderRoot(containerName);
         awaitConsistency();

         PageSet<? extends StorageMetadata> container = view.getBlobStore().list(
               containerName, new ListContainerOptions().prefix(prefix + "/").delimiter("/"));
         assert container.getNextMarker() == null;
         assertEquals(container.size(), 10);
      } finally {
         returnContainer(containerName);
      }
   }

   /**
    * Pages of 10 over 26 alphabet blobs must come back as 10 + 10 + 6, with a
    * marker on every page except the last.
    */
   @Test(groups = { "integration", "live" })
   public void testListContainerMaxResults() throws InterruptedException {
      String containerName = getContainerName();
      try {
         addAlphabetUnderRoot(containerName);
         PageSet<?
extends StorageMetadata> container;
         ListContainerOptions options = maxResults(10);

         container = view.getBlobStore().list(containerName, options);
         assertThat(container).hasSize(10);
         assertThat(container.getNextMarker()).isNotNull();

         container = view.getBlobStore().list(containerName, options.afterMarker(container.getNextMarker()));
         assertThat(container).hasSize(10);
         assertThat(container.getNextMarker()).isNotNull();

         container = view.getBlobStore().list(containerName, options.afterMarker(container.getNextMarker()));
         assertThat(container).hasSize(6);
         assertThat(container.getNextMarker()).isNull();
      } finally {
         returnContainer(containerName);
      }
   }

   /**
    * Verifies delimiter-based hierarchy listing with markers, at the root and
    * inside a child "directory". Blob names are built with File.separator, so
    * the ignoreOnWindows data provider skips this test on Windows.
    */
   @Test(dataProvider = "ignoreOnWindows", groups = { "integration", "live" })
   public void testDelimiter() throws Exception {
      String containerName = getContainerName();
      try {
         for (String blobName : new String[] { "asdf", "boo" + File.separator + "bar",
               "boo" + File.separator + "baz" + File.separator + "xyzzy",
               "cquux" + File.separator + "thud", "cquux" + File.separator + "bla" }) {
            Blob blob = view.getBlobStore().blobBuilder(blobName).payload(TEST_STRING).build();
            addBlobToContainer(containerName, blob);
         }

         // test root directory without marker
         PageSet<? extends StorageMetadata> pageSet = view.getBlobStore().list(containerName);
         assertThat(pageSet).hasSize(3);
         assertThat(pageSet.getNextMarker()).isNull();

         // list root directory with marker
         ListContainerOptions options = new ListContainerOptions().maxResults(1);
         pageSet = view.getBlobStore().list(containerName, options);
         assertThat(pageSet).hasSize(1);
         assertThat(pageSet.iterator().next().getName()).isEqualTo("asdf");
         assertThat(pageSet.getNextMarker()).isNotNull();

         options.afterMarker(pageSet.getNextMarker());
         pageSet = view.getBlobStore().list(containerName, options);
         assertThat(pageSet).hasSize(1);
         assertThat(pageSet.iterator().next().getName()).isEqualTo("boo/");
         assertThat(pageSet.getNextMarker()).isNotNull();

         options.afterMarker(pageSet.getNextMarker());
         pageSet = view.getBlobStore().list(containerName, options);
         assertThat(pageSet).hasSize(1);
         assertThat(pageSet.iterator().next().getName()).isEqualTo("cquux/");
         assertThat(pageSet.getNextMarker()).isNull();

         // list child directory with marker
         options = new ListContainerOptions().inDirectory("boo").maxResults(1);
         pageSet = view.getBlobStore().list(containerName, options);
         assertThat(pageSet).hasSize(1);
         assertThat(pageSet.iterator().next().getName()).isEqualTo("boo/bar");
         assertThat(pageSet.getNextMarker()).isNotNull();

         options.afterMarker(pageSet.getNextMarker());
         pageSet = view.getBlobStore().list(containerName, options);
         assertThat(pageSet).hasSize(1);
         assertThat(pageSet.iterator().next().getName()).isEqualTo("boo/baz/");
         assertThat(pageSet.getNextMarker()).isNull();

         // list child directory without marker
         options = new ListContainerOptions().inDirectory("boo").maxResults(2);
         pageSet = view.getBlobStore().list(containerName, options);
         assertThat(pageSet).hasSize(2);
         Iterator<? extends StorageMetadata> it = pageSet.iterator();
         assertThat(it.next().getName()).isEqualTo("boo/bar");
         assertThat(it.next().getName()).isEqualTo("boo/baz/");
         assertThat(pageSet.getNextMarker()).isNull();
      } finally {
         returnContainer(containerName);
      }
   }

   /** A container obtained from the test pool must report as existing. */
   @Test(groups = { "integration", "live" })
   public void containerExists() throws InterruptedException {
      String containerName = getContainerName();
      try {
         assert view.getBlobStore().containerExists(containerName);
      } finally {
         returnContainer(containerName);
      }
   }

   /** deleteContainer removes a container even when it still holds a blob. */
   @Test(groups = { "integration", "live" })
   public void deleteContainerWithContents() throws InterruptedException {
      String containerName = getContainerName();
      try {
         addBlobToContainer(containerName, "test");
         view.getBlobStore().deleteContainer(containerName);
         awaitConsistency();
         assertNotExists(containerName);
      } finally {
         recycleContainerAndAddToPool(containerName);
      }
   }

   /** deleteContainer removes an empty container. */
   @Test(groups = { "integration", "live" })
   public void deleteContainerWithoutContents() throws InterruptedException {
      final String containerName = getContainerName();
      try {
         view.getBlobStore().deleteContainer(containerName);
         awaitConsistency();
         assertNotExists(containerName);
      } finally {
         // this container is now deleted, so we can't reuse it directly
         recycleContainerAndAddToPool(containerName);
      }
   }

   /** deleteContainerIfEmpty must refuse to delete a non-empty container. */
   @Test(groups = { "integration", "live" })
   public void deleteContainerIfEmptyWithContents() throws InterruptedException {
      String containerName = getContainerName();
      try {
         addBlobToContainer(containerName, "test");
         awaitConsistency();
         assertFalse(view.getBlobStore().deleteContainerIfEmpty(containerName));
         awaitConsistency();
         assertTrue(view.getBlobStore().containerExists(containerName));
      } finally {
         recycleContainerAndAddToPool(containerName);
      }
   }

   /**
    * deleteContainerIfEmpty deletes an empty container, and also returns true
    * when called again on the now-missing container.
    */
   @Test(groups = { "integration", "live" })
   public void deleteContainerIfEmptyWithoutContents() throws InterruptedException {
      final String containerName = getContainerName();
      try {
         assertTrue(view.getBlobStore().deleteContainerIfEmpty(containerName));
awaitConsistency(); assertNotExists(containerName); // verify that true is returned even if the container does not exist assertTrue(view.getBlobStore().deleteContainerIfEmpty(containerName)); } finally { // this container is now deleted, so we can't reuse it directly recycleContainerAndAddToPool(containerName); } } @Test(groups = { "integration", "live" }) public void testListContainer() throws InterruptedException, ExecutionException, TimeoutException { String containerName = getContainerName(); try { add15UnderRoot(containerName); awaitConsistency(); Set<? extends StorageMetadata> container = view.getBlobStore().list(containerName); assertEquals(container.size(), 15); } finally { returnContainer(containerName); } } @Test(groups = { "integration", "live" }) public void testListContainerGetBlobSize() throws Exception { String containerName = getContainerName(); try { ByteSource byteSource = ByteSource.wrap(new byte[42]); for (int i = 0; i < 2; i++) { view.getBlobStore().putBlob(containerName, view.getBlobStore() .blobBuilder(i + "") .payload(byteSource) .contentLength(byteSource.size()) .build()); } PageSet<? 
extends StorageMetadata> container = view.getBlobStore().list(containerName); for (StorageMetadata metadata : container) { assertEquals(metadata.getSize(), Long.valueOf(byteSource.size())); } } finally { returnContainer(containerName); } } @Test(groups = { "integration", "live" }) public void testSetContainerAccess() throws Exception { BlobStore blobStore = view.getBlobStore(); String containerName = getContainerName(); try { assertThat(blobStore.getContainerAccess(containerName)).isEqualTo(ContainerAccess.PRIVATE); blobStore.setContainerAccess(containerName, ContainerAccess.PUBLIC_READ); assertThat(blobStore.getContainerAccess(containerName)).isEqualTo(ContainerAccess.PUBLIC_READ); String blobName = "blob"; blobStore.putBlob(containerName, blobStore.blobBuilder(blobName).payload("").build()); // test that blob is anonymously readable HttpRequest request = view.getSigner().signGetBlob(containerName, blobName).toBuilder() .replaceQueryParams(ImmutableMap.<String, String>of()).build(); HttpResponse response = view.utils().http().invoke(request); assertThat(response.getStatusCode()).isEqualTo(200); blobStore.setContainerAccess(containerName, ContainerAccess.PRIVATE); assertThat(blobStore.getContainerAccess(containerName)).isEqualTo(ContainerAccess.PRIVATE); } finally { recycleContainerAndAddToPool(containerName); } } @Test(groups = {"integration", "live"}) public void testContainerListWithPrefix() throws InterruptedException { final String containerName = getContainerName(); BlobStore blobStore = view.getBlobStore(); String prefix = "blob"; try { blobStore.putBlob(containerName, blobStore.blobBuilder(prefix).payload("").build()); blobStore.putBlob(containerName, blobStore.blobBuilder(prefix + "foo").payload("").build()); blobStore.putBlob(containerName, blobStore.blobBuilder(prefix + "bar").payload("").build()); blobStore.putBlob(containerName, blobStore.blobBuilder("foo").payload("").build()); checkEqualNames(ImmutableSet.of(prefix, prefix + "foo", prefix + "bar"), 
blobStore.list(containerName, ListContainerOptions.Builder.prefix(prefix))); } finally { returnContainer(containerName); } } @Test(groups = {"integration", "live"}) public void testContainerListWithDetails() throws InterruptedException { final String containerName = getContainerName(); BlobStore blobStore = view.getBlobStore(); String prefix = "testContainerListWithDetails/"; try { blobStore.putBlob(containerName, blobStore.blobBuilder(prefix + "foo/bar").payload("").build()); blobStore.putBlob(containerName, blobStore.blobBuilder(prefix + "car").payload("").build()); checkEqualNames( ImmutableSet.of(prefix + "foo/", prefix + "car"), blobStore.list(containerName, ListContainerOptions.Builder.prefix(prefix).delimiter("/")) ); checkEqualNames( ImmutableSet.of(prefix + "foo/", prefix + "car"), blobStore.list(containerName, ListContainerOptions.Builder.prefix(prefix).delimiter("/").withDetails()) ); } finally { returnContainer(containerName); } } @Test(groups = {"integration", "live"}) public void testDelimiterList() throws InterruptedException { final String containerName = getContainerName(); BlobStore blobStore = view.getBlobStore(); String payload = "foo"; try { blobStore.putBlob(containerName, blobStore.blobBuilder("test-foo-foo").payload(payload).build()); blobStore.putBlob(containerName, blobStore.blobBuilder("test-bar-foo").payload(payload).build()); blobStore.putBlob(containerName, blobStore.blobBuilder("foo").payload(payload).build()); // NOTE: the test does not work if we use a file separator character ("/" or "\"), as the file system blob // store will create directories when putting such a blob. When listing results, these directories will also // show up in the result set. 
checkEqualNames(ImmutableSet.of("foo", "test-"), blobStore.list(containerName, ListContainerOptions.Builder.delimiter("-"))); checkEqualNames(ImmutableSet.of("test-foo-foo", "test-bar-foo", "foo"), blobStore.list(containerName, ListContainerOptions.Builder.delimiter("."))); blobStore.putBlob(containerName, blobStore.blobBuilder("bar").payload(payload).build()); blobStore.putBlob(containerName, blobStore.blobBuilder("bazar").payload(payload).build()); checkEqualNames(ImmutableSet.of("bar", "baza"), blobStore.list(containerName, ListContainerOptions.Builder.delimiter("a").prefix("ba"))); } finally { returnContainer(containerName); } } /** Test that listing with a marker prefix matches the first key with that prefix. */ @Test public void testListMarkerPrefix() throws Exception { BlobStore blobStore = view.getBlobStore(); final String container = getContainerName(); try { blobStore.createContainerInLocation(null, container); blobStore.putBlob(container, blobStore.blobBuilder("a/a").payload("").build()); blobStore.putBlob(container, blobStore.blobBuilder("b/b").payload("").build()); ListContainerOptions options = new ListContainerOptions().afterMarker("b/").recursive(); PageSet<? extends StorageMetadata> res = blobStore.list(container, options); assertThat(res).hasSize(1); assertThat(res.iterator().next().getName()).isEqualTo("b/b"); } finally { returnContainer(container); } } /** Test that listing with an empty string for prefix and delimiter returns all of the keys. 
*/ @Test(groups = {"integration", "live"}) public void testListEmptyPrefixDelimiter() throws Exception { final String container = getContainerName(); BlobStore blobStore = view.getBlobStore(); blobStore.createContainerInLocation(null, container); try { ImmutableList<String> blobs = ImmutableList.of("a", "b", "c"); for (String blob : blobs) { blobStore.putBlob(container, blobStore.blobBuilder(blob).payload("").build()); } ListContainerOptions options = ListContainerOptions.Builder.delimiter("") .prefix("").afterMarker(""); PageSet<? extends StorageMetadata> rs = blobStore.list(container, options); ImmutableList.Builder<String> builder = ImmutableList.builder(); for (StorageMetadata sm : rs) { builder.add(sm.getName()); } assertThat(builder.build()).containsExactlyElementsOf(blobs); } finally { returnContainer(container); } } @DataProvider public Object[][] getBlobsToEscape() { ImmutableSet<String> testNames = ImmutableSet.of("%20", "%20 ", " %20", " ", "%", "%%"); Object[][] result = new Object[1][1]; result[0][0] = testNames; return result; } @Test(dataProvider = "getBlobsToEscape", groups = {"integration", "live"}) public void testBlobNameEscaping(Set<String> blobNames) throws InterruptedException { final String containerName = getContainerName(); BlobStore blobStore = view.getBlobStore(); try { for (String name : blobNames) { Blob blob = blobStore.blobBuilder(name).payload(ByteSource.wrap("test".getBytes())).contentLength(4) .build(); blobStore.putBlob(containerName, blob); } checkEqualNames(blobNames, blobStore.list(containerName)); } finally { returnContainer(containerName); } } private void checkEqualNames(Set<String> expectedSet, PageSet<? 
extends StorageMetadata> results) { Set<String> names = new HashSet<String>(); for (StorageMetadata sm : results) { names.add(sm.getName()); } assertThat(names).containsOnlyElementsOf(expectedSet); } protected void addAlphabetUnderRoot(String containerName) throws InterruptedException { for (char letter = 'a'; letter <= 'z'; letter++) { view.getBlobStore().putBlob(containerName, view.getBlobStore().blobBuilder(letter + "").payload(letter + "content").build()); } assertContainerSize(containerName, 26); } protected void assertContainerSize(final String containerName, final int size) throws InterruptedException { assertConsistencyAware(new Runnable() { public void run() { try { assertEquals(view.getBlobStore().countBlobs(containerName), size); } catch (Exception e) { propagateIfPossible(e); } } }); } protected void add15UnderRoot(String containerName) throws InterruptedException { for (int i = 0; i < 15; i++) { view.getBlobStore().putBlob(containerName, view.getBlobStore().blobBuilder(i + "").payload(i + "content").build()); } } protected void addTenObjectsUnderPrefix(String containerName, String prefix) throws InterruptedException { for (int i = 0; i < 10; i++) { view.getBlobStore().putBlob(containerName, view.getBlobStore().blobBuilder(prefix + "/" + i).payload(i + "content").build()); } } protected void awaitConsistency() { if (view.getConsistencyModel() == ConsistencyModel.EVENTUAL) { Uninterruptibles.sleepUninterruptibly(AWAIT_CONSISTENCY_TIMEOUT_SECONDS, TimeUnit.SECONDS); } } @DataProvider public Object[][] ignoreOnWindows() { return isWindowsOs() ? NO_INVOCATIONS : SINGLE_NO_ARG_INVOCATION; } private static boolean isWindowsOs() { return System.getProperty("os.name", "").toLowerCase().contains("windows"); } }
apache/hadoop
35,268
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/component/instance/TestComponentInstance.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.service.component.instance; import org.apache.hadoop.util.Lists; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.LocalizationState; import org.apache.hadoop.yarn.api.records.LocalizationStatus; import org.apache.hadoop.yarn.service.MockRunningServiceContext; import org.apache.hadoop.yarn.service.ServiceContext; import org.apache.hadoop.yarn.service.ServiceScheduler; import org.apache.hadoop.yarn.service.ServiceTestUtils; import org.apache.hadoop.yarn.service.api.records.Configuration; import org.apache.hadoop.yarn.service.TestServiceManager; import org.apache.hadoop.yarn.service.api.records.ConfigFile; import org.apache.hadoop.yarn.service.api.records.Container; import org.apache.hadoop.yarn.service.api.records.ContainerState; import org.apache.hadoop.yarn.service.api.records.Service; import 
org.apache.hadoop.yarn.service.component.Component; import org.apache.hadoop.yarn.service.component.ComponentEvent; import org.apache.hadoop.yarn.service.component.ComponentEventType; import org.apache.hadoop.yarn.service.component.TestComponent; import org.apache.hadoop.yarn.service.utils.ServiceUtils; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import org.mockito.Mockito; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Tests for {@link ComponentInstance}. 
*/ public class TestComponentInstance { @RegisterExtension private ServiceTestUtils.ServiceFSWatcher rule = new ServiceTestUtils.ServiceFSWatcher(); @Test public void testContainerUpgrade() throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testContainerUpgrade"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); upgradeComponent(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent instanceEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE); instance.handle(instanceEvent); Container containerSpec = component.getComponentSpec().getContainer( instance.getContainer().getId().toString()); assertEquals(ContainerState.UPGRADING, containerSpec.getState(), "instance not upgrading"); } @Test public void testContainerReadyAfterUpgrade() throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testContainerReadyAfterUpgrade"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); upgradeComponent(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent instanceEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE); instance.handle(instanceEvent); instance.handle(new ComponentInstanceEvent(instance.getContainer().getId(), ComponentInstanceEventType.START)); assertEquals(ContainerState.RUNNING_BUT_UNREADY, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance not running"); instance.handle(new ComponentInstanceEvent(instance.getContainer().getId(), ComponentInstanceEventType.BECOME_READY)); assertEquals(ContainerState.READY, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance not 
ready"); } @Test public void testContainerUpgradeFailed() throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testContainerUpgradeFailed"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); upgradeComponent(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent upgradeEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE); instance.handle(upgradeEvent); ContainerStatus containerStatus = mock(ContainerStatus.class); when(containerStatus.getExitStatus()).thenReturn( ContainerExitStatus.ABORTED); ComponentInstanceEvent stopEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.STOP) .setStatus(containerStatus); // this is the call back from NM for the upgrade instance.handle(stopEvent); assertEquals(ContainerState.FAILED_UPGRADE, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance did not fail"); } @Test public void testFailureAfterReinit() throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testContainerUpgradeFailed"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); upgradeComponent(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent upgradeEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE); instance.handle(upgradeEvent); // NM finished updgrae instance.handle(new ComponentInstanceEvent(instance.getContainer().getId(), ComponentInstanceEventType.START)); assertEquals(ContainerState.RUNNING_BUT_UNREADY, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance not running"); ContainerStatus containerStatus = 
mock(ContainerStatus.class); when(containerStatus.getExitStatus()).thenReturn( ContainerExitStatus.ABORTED); ComponentInstanceEvent stopEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.STOP) .setStatus(containerStatus); // this is the call back from NM for the upgrade instance.handle(stopEvent); assertEquals(ContainerState.FAILED_UPGRADE, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance did not fail"); } @Test public void testCancelNothingToUpgrade() throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testCancelUpgradeWhenContainerReady"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); cancelCompUpgrade(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent cancelEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.CANCEL_UPGRADE); instance.handle(cancelEvent); assertEquals(ContainerState.READY, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance not ready"); } @Test public void testCancelUpgradeFailed() throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testCancelUpgradeFailed"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); cancelCompUpgrade(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent cancelEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.CANCEL_UPGRADE); instance.handle(cancelEvent); instance.handle(new ComponentInstanceEvent(instance.getContainer().getId(), ComponentInstanceEventType.STOP)); assertEquals(ComponentInstanceState.INIT, instance.getState(), "instance not init"); } @Test public void 
testCancelAfterCompProcessedCancel() throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testCancelAfterCompProcessedCancel"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); upgradeComponent(component); cancelCompUpgrade(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent upgradeEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE); instance.handle(upgradeEvent); assertEquals(ContainerState.NEEDS_UPGRADE, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance should start upgrading"); } @Test public void testCancelWhileUpgradeWithSuccess() throws Exception { validateCancelWhileUpgrading(true, true); } @Test public void testCancelWhileUpgradeWithFailure() throws Exception { validateCancelWhileUpgrading(false, true); } @Test public void testCancelFailedWhileUpgradeWithSuccess() throws Exception { validateCancelWhileUpgrading(true, false); } @Test public void testCancelFailedWhileUpgradeWithFailure() throws Exception { validateCancelWhileUpgrading(false, false); } @Test public void testUpdateLocalizationStatuses() throws Exception { Service def = TestServiceManager.createBaseDef( "testUpdateLocalizationStatuses"); String file1 = rule.getServiceBasePath().toString() + "/file1"; Files.write(Paths.get(file1), "test file".getBytes(), StandardOpenOption.CREATE_NEW); org.apache.hadoop.yarn.service.api.records.Component compDef = def.getComponents().iterator().next(); ConfigFile configFile1 = new ConfigFile(); configFile1.setType(ConfigFile.TypeEnum.STATIC); configFile1.setSrcFile(file1); compDef.setConfiguration(new Configuration().files( Lists.newArrayList(configFile1))); ServiceContext context = new MockRunningServiceContext(rule, def); Component component = context.scheduler.getAllComponents().get( 
compDef.getName()); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); LocalizationStatus status = LocalizationStatus.newInstance("file1", LocalizationState.PENDING); instance.updateLocalizationStatuses(Lists.newArrayList(status)); assertTrue(instance.isLclRetrieverActive(), "retriever should still be active"); Container container = instance.getContainerSpec(); assertTrue(container.getLocalizationStatuses() != null); assertEquals(container.getLocalizationStatuses().get(0).getDestFile(), status.getResourceKey(), "dest file"); assertEquals(container.getLocalizationStatuses().get(0).getState(), status.getLocalizationState(), "state"); status = LocalizationStatus.newInstance("file1", LocalizationState.COMPLETED); instance.updateLocalizationStatuses(Lists.newArrayList(status)); assertTrue(!instance.isLclRetrieverActive(), "retriever should not be active"); assertTrue(container.getLocalizationStatuses() != null); assertEquals(container.getLocalizationStatuses().get(0).getDestFile(), status.getResourceKey(), "dest file"); assertEquals(container.getLocalizationStatuses().get(0).getState(), status.getLocalizationState(), "state"); } private void validateCancelWhileUpgrading(boolean upgradeSuccessful, boolean cancelUpgradeSuccessful) throws Exception { ServiceContext context = TestComponent.createTestContext(rule, "testCancelWhileUpgrading"); Component component = context.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); upgradeComponent(component); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); ComponentInstanceEvent upgradeEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE); instance.handle(upgradeEvent); assertEquals(ContainerState.UPGRADING, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance should be upgrading"); cancelCompUpgrade(component); ComponentInstanceEvent 
cancelEvent = new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.CANCEL_UPGRADE); instance.handle(cancelEvent); // either upgrade failed or successful if (upgradeSuccessful) { instance.handle(new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.START)); instance.handle(new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.BECOME_READY)); } else { instance.handle(new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.STOP)); } assertEquals(ContainerState.UPGRADING, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance not upgrading"); // response for cancel received if (cancelUpgradeSuccessful) { instance.handle(new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.START)); instance.handle(new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.BECOME_READY)); } else { instance.handle(new ComponentInstanceEvent( instance.getContainer().getId(), ComponentInstanceEventType.STOP)); } if (cancelUpgradeSuccessful) { assertEquals(ContainerState.READY, component.getComponentSpec().getContainer(instance.getContainer() .getId().toString()).getState(), "instance not ready"); } else { assertEquals(ComponentInstanceState.INIT, instance.getState(), "instance not init"); } } private void upgradeComponent(Component component) { component.handle(new ComponentEvent(component.getName(), ComponentEventType.UPGRADE).setTargetSpec(component.getComponentSpec()) .setUpgradeVersion("v2")); } private void cancelCompUpgrade(Component component) { component.handle(new ComponentEvent(component.getName(), ComponentEventType.CANCEL_UPGRADE) .setTargetSpec(component.getComponentSpec()) .setUpgradeVersion("v1")); } private Component createComponent(ServiceScheduler scheduler, 
org.apache.hadoop.yarn.service.api.records.Component.RestartPolicyEnum restartPolicy, int nSucceededInstances, int nFailedInstances, int totalAsk, int componentId) { assert (nSucceededInstances + nFailedInstances) <= totalAsk; Component comp = mock(Component.class); org.apache.hadoop.yarn.service.api.records.Component componentSpec = mock( org.apache.hadoop.yarn.service.api.records.Component.class); when(componentSpec.getRestartPolicy()).thenReturn(restartPolicy); Configuration conf = new Configuration(); when(componentSpec.getConfiguration()).thenReturn(conf); when(comp.getRestartPolicyHandler()).thenReturn( Component.getRestartPolicyHandler(restartPolicy)); when(componentSpec.getNumberOfContainers()).thenReturn( Long.valueOf(totalAsk)); when(comp.getComponentSpec()).thenReturn(componentSpec); when(comp.getScheduler()).thenReturn(scheduler); Map<String, ComponentInstance> succeeded = new ConcurrentHashMap<>(); Map<String, ComponentInstance> failed = new ConcurrentHashMap<>(); scheduler.getAllComponents().put("comp" + componentId, comp); Map<String, ComponentInstance> componentInstances = new HashMap<>(); for (int i = 0; i < nSucceededInstances; i++) { ComponentInstance componentInstance = createComponentInstance(comp, i); componentInstances.put(componentInstance.getCompInstanceName(), componentInstance); succeeded.put(componentInstance.getCompInstanceName(), componentInstance); } for (int i = 0; i < nFailedInstances; i++) { ComponentInstance componentInstance = createComponentInstance(comp, i + nSucceededInstances); componentInstances.put(componentInstance.getCompInstanceName(), componentInstance); failed.put(componentInstance.getCompInstanceName(), componentInstance); } int delta = totalAsk - nFailedInstances - nSucceededInstances; for (int i = 0; i < delta; i++) { ComponentInstance componentInstance = createComponentInstance(comp, i + nSucceededInstances + nFailedInstances); componentInstances.put(componentInstance.getCompInstanceName(), componentInstance); } 
when(comp.getAllComponentInstances()).thenReturn( componentInstances.values()); when(comp.getSucceededInstances()).thenReturn(succeeded.values()); when(comp.getFailedInstances()).thenReturn(failed.values()); return comp; } private Component createComponent(ServiceScheduler scheduler, org.apache.hadoop.yarn.service.api.records.Component.RestartPolicyEnum restartPolicy, int totalAsk, int componentId) { Component comp = mock(Component.class); org.apache.hadoop.yarn.service.api.records.Component componentSpec = mock( org.apache.hadoop.yarn.service.api.records.Component.class); when(componentSpec.getRestartPolicy()).thenReturn(restartPolicy); Configuration conf = new Configuration(); when(componentSpec.getConfiguration()).thenReturn(conf); when(comp.getRestartPolicyHandler()).thenReturn( Component.getRestartPolicyHandler(restartPolicy)); when(componentSpec.getNumberOfContainers()).thenReturn( Long.valueOf(totalAsk)); when(comp.getComponentSpec()).thenReturn(componentSpec); when(comp.getScheduler()).thenReturn(scheduler); scheduler.getAllComponents().put("comp" + componentId, comp); Map<String, ComponentInstance> componentInstances = new HashMap<>(); for (int i = 0; i < totalAsk; i++) { ComponentInstance componentInstance = createComponentInstance(comp, i); componentInstances.put(componentInstance.getCompInstanceName(), componentInstance); } when(comp.getAllComponentInstances()).thenReturn( componentInstances.values()); return comp; } private ComponentInstance createComponentInstance(Component component, int instanceId) { ComponentInstance componentInstance = mock(ComponentInstance.class); when(componentInstance.getComponent()).thenReturn(component); when(componentInstance.getCompInstanceName()).thenReturn( "compInstance" + instanceId); Container container = mock(Container.class); when(componentInstance.getContainerSpec()).thenReturn(container); ServiceUtils.ProcessTerminationHandler terminationHandler = mock( ServiceUtils.ProcessTerminationHandler.class); 
when(component.getScheduler().getTerminationHandler()).thenReturn( terminationHandler); return componentInstance; } @Test public void testComponentRestartPolicy() { Map<String, Component> allComponents = new HashMap<>(); Service mockService = mock(Service.class); ServiceContext serviceContext = mock(ServiceContext.class); when(serviceContext.getService()).thenReturn(mockService); ServiceScheduler serviceSchedulerInstance = new ServiceScheduler( serviceContext); ServiceScheduler serviceScheduler = spy(serviceSchedulerInstance); when(serviceScheduler.getAllComponents()).thenReturn(allComponents); Mockito.doNothing().when(serviceScheduler).setGracefulStop( any(FinalApplicationStatus.class)); final String containerDiag = "Container succeeded"; ComponentInstanceEvent componentInstanceEvent = mock( ComponentInstanceEvent.class); ContainerId containerId = ContainerId.newContainerId(ApplicationAttemptId .newInstance(ApplicationId.newInstance(1234L, 1), 1), 1); ContainerStatus containerStatus = ContainerStatus.newInstance(containerId, org.apache.hadoop.yarn.api.records.ContainerState.COMPLETE, containerDiag, 0); when(componentInstanceEvent.getStatus()).thenReturn(containerStatus); // Test case1: one component, one instance, restart policy = ALWAYS, exit=0 Component comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.ALWAYS, 1, 0, 1, 0); ComponentInstance componentInstance = comp.getAllComponentInstances().iterator().next(); ComponentInstance.handleComponentInstanceRelaunch(componentInstance, componentInstanceEvent, false, containerDiag); verify(comp, never()).markAsSucceeded(any(ComponentInstance.class)); verify(comp, never()).markAsFailed(any(ComponentInstance.class)); verify(comp, times(1)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), never()).terminate( anyInt()); // Test case2: one component, one instance, restart policy = ALWAYS, exit=1 comp = 
createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.ALWAYS, 0, 1, 1, 0); componentInstance = comp.getAllComponentInstances().iterator().next(); containerStatus.setExitStatus(1); ComponentInstance.handleComponentInstanceRelaunch(componentInstance, componentInstanceEvent, false, containerDiag); verify(comp, never()).markAsSucceeded(any(ComponentInstance.class)); verify(comp, never()).markAsFailed(any(ComponentInstance.class)); verify(comp, times(1)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), never()).terminate( anyInt()); // Test case3: one component, one instance, restart policy = NEVER, exit=0 // Should exit with code=0 comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.NEVER, 1, 0, 1, 0); componentInstance = comp.getAllComponentInstances().iterator().next(); containerStatus.setExitStatus(0); Map<String, ComponentInstance> succeededInstances = new HashMap<>(); succeededInstances.put(componentInstance.getCompInstanceName(), componentInstance); when(comp.getSucceededInstances()).thenReturn(succeededInstances.values()); when(comp.getNumSucceededInstances()).thenReturn(new Long(1)); ComponentInstance.handleComponentInstanceRelaunch(componentInstance, componentInstanceEvent, false, containerDiag); verify(comp, times(1)).markAsSucceeded(any(ComponentInstance.class)); verify(comp, never()).markAsFailed(any(ComponentInstance.class)); verify(comp, times(0)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), times(1)).terminate(eq(0)); // Test case4: one component, one instance, restart policy = NEVER, exit=1 // Should exit with code=-1 comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.NEVER, 0, 1, 1, 0); componentInstance = comp.getAllComponentInstances().iterator().next(); 
containerStatus.setExitStatus(-1); when(comp.getNumFailedInstances()).thenReturn(new Long(1)); ComponentInstance.handleComponentInstanceRelaunch(componentInstance, componentInstanceEvent, false, containerDiag); verify(comp, never()).markAsSucceeded(any(ComponentInstance.class)); verify(comp, times(1)).markAsFailed(any(ComponentInstance.class)); verify(comp, times(0)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), times(1)).terminate( eq(-1)); // Test case5: one component, one instance, restart policy = ON_FAILURE, // exit=1 // Should continue run. comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.ON_FAILURE, 0, 1, 1, 0); componentInstance = comp.getAllComponentInstances().iterator().next(); containerStatus.setExitStatus(1); ComponentInstance.handleComponentInstanceRelaunch(componentInstance, componentInstanceEvent, false, containerDiag); verify(comp, never()).markAsSucceeded(any(ComponentInstance.class)); verify(comp, never()).markAsFailed(any(ComponentInstance.class)); verify(comp, times(1)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), times(0)).terminate( anyInt()); // Test case6: one component, 3 instances, restart policy = NEVER, exit=1 // 2 of the instances not completed, it should continue run. 
comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.NEVER, 0, 1, 3, 0); componentInstance = comp.getAllComponentInstances().iterator().next(); containerStatus.setExitStatus(1); ComponentInstance.handleComponentInstanceRelaunch(componentInstance, componentInstanceEvent, false, containerDiag); verify(comp, never()).markAsSucceeded(any(ComponentInstance.class)); verify(comp, times(1)).markAsFailed(any(ComponentInstance.class)); verify(comp, times(0)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), times(0)).terminate( anyInt()); // Test case7: one component, 3 instances, restart policy = ON_FAILURE, // exit=1 // 2 of the instances completed, it should continue run. comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.ON_FAILURE, 0, 1, 3, 0); Iterator<ComponentInstance> iter = comp.getAllComponentInstances().iterator(); containerStatus.setExitStatus(1); ComponentInstance commponentInstance = iter.next(); ComponentInstance.handleComponentInstanceRelaunch(commponentInstance, componentInstanceEvent, false, containerDiag); verify(comp, never()).markAsSucceeded(any(ComponentInstance.class)); verify(comp, never()).markAsFailed(any(ComponentInstance.class)); verify(comp, times(1)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), times(0)).terminate( anyInt()); // Test case8: 2 components, 2 instances for each // comp2 already finished. 
// comp1 has a new instance finish, we should terminate the service comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.NEVER, 2, 0); Collection<ComponentInstance> component1Instances = comp.getAllComponentInstances(); containerStatus.setExitStatus(-1); Component comp2 = createComponent( componentInstance.getComponent().getScheduler(), org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.NEVER, 2, 1); Collection<ComponentInstance> component2Instances = comp2.getAllComponentInstances(); Map<String, ComponentInstance> failed2Instances = new HashMap<>(); for (ComponentInstance component2Instance : component2Instances) { failed2Instances.put(component2Instance.getCompInstanceName(), component2Instance); when(component2Instance.getComponent().getFailedInstances()).thenReturn( failed2Instances.values()); when(component2Instance.getComponent().getNumFailedInstances()) .thenReturn(new Long(failed2Instances.size())); ComponentInstance.handleComponentInstanceRelaunch(component2Instance, componentInstanceEvent, false, containerDiag); } Map<String, ComponentInstance> failed1Instances = new HashMap<>(); // 2nd component, already finished. 
for (ComponentInstance component1Instance : component1Instances) { failed1Instances.put(component1Instance.getCompInstanceName(), component1Instance); when(component1Instance.getComponent().getFailedInstances()).thenReturn( failed1Instances.values()); when(component1Instance.getComponent().getNumFailedInstances()) .thenReturn(new Long(failed1Instances.size())); ComponentInstance.handleComponentInstanceRelaunch(component1Instance, componentInstanceEvent, false, containerDiag); } verify(comp, never()).markAsSucceeded(any(ComponentInstance.class)); verify(comp, times(2)).markAsFailed(any(ComponentInstance.class)); verify(comp, times(0)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), times(1)).terminate( eq(-1)); // Test case9: 2 components, 2 instances for each // comp2 already finished. // comp1 has a new instance finish, we should terminate the service // All instance finish with 0, service should exit with 0 as well. containerStatus.setExitStatus(0); comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.ON_FAILURE, 2, 0); component1Instances = comp.getAllComponentInstances(); comp2 = createComponent(componentInstance.getComponent().getScheduler(), org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.ON_FAILURE, 2, 1); component2Instances = comp2.getAllComponentInstances(); Map<String, ComponentInstance> succeeded2Instances = new HashMap<>(); for (ComponentInstance component2Instance : component2Instances) { succeeded2Instances.put(component2Instance.getCompInstanceName(), component2Instance); when(component2Instance.getComponent().getSucceededInstances()) .thenReturn(succeeded2Instances.values()); when(component2Instance.getComponent().getNumSucceededInstances()) .thenReturn(new Long(succeeded2Instances.size())); ComponentInstance.handleComponentInstanceRelaunch(component2Instance, componentInstanceEvent, false, containerDiag); } 
Map<String, ComponentInstance> succeeded1Instances = new HashMap<>(); // 2nd component, already finished. for (ComponentInstance component1Instance : component1Instances) { succeeded1Instances.put(component1Instance.getCompInstanceName(), component1Instance); when(component1Instance.getComponent().getSucceededInstances()) .thenReturn(succeeded1Instances.values()); when(component1Instance.getComponent().getNumSucceededInstances()) .thenReturn(new Long(succeeded1Instances.size())); ComponentInstance.handleComponentInstanceRelaunch(component1Instance, componentInstanceEvent, false, containerDiag); } verify(comp, times(2)).markAsSucceeded(any(ComponentInstance.class)); verify(comp, never()).markAsFailed(any(ComponentInstance.class)); verify(componentInstance.getComponent(), times(0)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), times(1)).terminate(eq(0)); // Test case10: 2 components, 2 instances for each // comp2 hasn't finished // comp1 finished. // Service should continue run. comp = createComponent(serviceScheduler, org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.NEVER, 2, 0); component1Instances = comp.getAllComponentInstances(); comp2 = createComponent(componentInstance.getComponent().getScheduler(), org.apache.hadoop.yarn.service.api.records.Component .RestartPolicyEnum.NEVER, 2, 1); component2Instances = comp2.getAllComponentInstances(); for (ComponentInstance component2Instance : component2Instances) { ComponentInstance.handleComponentInstanceRelaunch(component2Instance, componentInstanceEvent, false, containerDiag); } succeeded1Instances = new HashMap<>(); // 2nd component, already finished. 
for (ComponentInstance component1Instance : component1Instances) { succeeded1Instances.put(component1Instance.getCompInstanceName(), component1Instance); when(component1Instance.getComponent().getSucceededInstances()) .thenReturn(succeeded1Instances.values()); ComponentInstance.handleComponentInstanceRelaunch(component1Instance, componentInstanceEvent, false, containerDiag); } verify(comp, times(2)).markAsSucceeded(any(ComponentInstance.class)); verify(comp, never()).markAsFailed(any(ComponentInstance.class)); verify(componentInstance.getComponent(), times(0)).reInsertPendingInstance( any(ComponentInstance.class)); verify(serviceScheduler.getTerminationHandler(), never()).terminate(eq(0)); } }
openjdk/jdk8
35,293
jdk/src/solaris/classes/sun/awt/X11/XEmbedCanvasPeer.java
/* * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package sun.awt.X11; import java.awt.*; import java.awt.dnd.DropTarget; import java.awt.dnd.DropTargetListener; import java.awt.event.*; import sun.awt.*; import sun.awt.AWTAccessor; import sun.util.logging.PlatformLogger; import java.util.*; import static sun.awt.X11.XEmbedHelper.*; import java.security.AccessController; import sun.security.action.GetBooleanAction; public class XEmbedCanvasPeer extends XCanvasPeer implements WindowFocusListener, KeyEventPostProcessor, ModalityListener, WindowIDProvider { private static final PlatformLogger xembedLog = PlatformLogger.getLogger("sun.awt.X11.xembed.XEmbedCanvasPeer"); boolean applicationActive; // Whether the application is active(has focus) XEmbedServer xembed = new XEmbedServer(); // Helper object, contains XEmbed intrinsics Map<Long, AWTKeyStroke> accelerators = new HashMap<Long, AWTKeyStroke>(); // Maps accelerator ID into AWTKeyStroke Map<AWTKeyStroke, Long> accel_lookup = new HashMap<AWTKeyStroke, Long>(); // Maps AWTKeyStroke into accelerator ID Set<GrabbedKey> grabbed_keys = new HashSet<GrabbedKey>(); // A set of keys grabbed by client Object ACCEL_LOCK = accelerators; // Lock object for working with accelerators; Object GRAB_LOCK = grabbed_keys; // Lock object for working with keys grabbed by client XEmbedCanvasPeer() {} XEmbedCanvasPeer(XCreateWindowParams params) { super(params); } XEmbedCanvasPeer(Component target) { super(target); } protected void postInit(XCreateWindowParams params) { super.postInit(params); installActivateListener(); installAcceleratorListener(); installModalityListener(); // XEmbed canvas should be non-traversable. 
// FIXME: Probably should be removed and enforced setting of it by the users target.setFocusTraversalKeysEnabled(false); } protected void preInit(XCreateWindowParams params) { super.preInit(params); params.put(EVENT_MASK, XConstants.KeyPressMask | XConstants.KeyReleaseMask | XConstants.FocusChangeMask | XConstants.ButtonPressMask | XConstants.ButtonReleaseMask | XConstants.EnterWindowMask | XConstants.LeaveWindowMask | XConstants.PointerMotionMask | XConstants.ButtonMotionMask | XConstants.ExposureMask | XConstants.StructureNotifyMask | XConstants.SubstructureNotifyMask); } void installModalityListener() { ((SunToolkit)Toolkit.getDefaultToolkit()).addModalityListener(this); } void deinstallModalityListener() { ((SunToolkit)Toolkit.getDefaultToolkit()).removeModalityListener(this); } void installAcceleratorListener() { KeyboardFocusManager.getCurrentKeyboardFocusManager().addKeyEventPostProcessor(this); } void deinstallAcceleratorListener() { KeyboardFocusManager.getCurrentKeyboardFocusManager().removeKeyEventPostProcessor(this); } void installActivateListener() { // FIXME: should watch for hierarchy changes Window toplevel = getTopLevel(target); if (toplevel != null) { toplevel.addWindowFocusListener(this); applicationActive = toplevel.isFocused(); } } void deinstallActivateListener() { Window toplevel = getTopLevel(target); if (toplevel != null) { toplevel.removeWindowFocusListener(this); } } boolean isXEmbedActive() { return xembed.handle != 0; } boolean isApplicationActive() { return applicationActive; } void initDispatching() { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Init embedding for " + Long.toHexString(xembed.handle)); } XToolkit.awtLock(); try { XToolkit.addEventDispatcher(xembed.handle, xembed); XlibWrapper.XSelectInput(XToolkit.getDisplay(), xembed.handle, XConstants.StructureNotifyMask | XConstants.PropertyChangeMask); XDropTargetRegistry.getRegistry().registerXEmbedClient(getWindow(), xembed.handle); } finally { 
XToolkit.awtUnlock(); } xembed.processXEmbedInfo(); notifyChildEmbedded(); } void endDispatching() { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("End dispatching for " + Long.toHexString(xembed.handle)); } XToolkit.awtLock(); try { XDropTargetRegistry.getRegistry().unregisterXEmbedClient(getWindow(), xembed.handle); // We can't deselect input since someone else might be interested in it XToolkit.removeEventDispatcher(xembed.handle, xembed); } finally { XToolkit.awtUnlock(); } } void embedChild(long child) { if (xembed.handle != 0) { detachChild(); } xembed.handle = child; initDispatching(); } void childDestroyed() { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Child " + Long.toHexString(xembed.handle) + " has self-destroyed."); } endDispatching(); xembed.handle = 0; } public void handleEvent(AWTEvent e) { super.handleEvent(e); if (isXEmbedActive()) { switch (e.getID()) { case FocusEvent.FOCUS_GAINED: canvasFocusGained((FocusEvent)e); break; case FocusEvent.FOCUS_LOST: canvasFocusLost((FocusEvent)e); break; case KeyEvent.KEY_PRESSED: case KeyEvent.KEY_RELEASED: if (!((InputEvent)e).isConsumed()) { forwardKeyEvent((KeyEvent)e); } break; } } } public void dispatchEvent(XEvent ev) { super.dispatchEvent(ev); switch (ev.get_type()) { case XConstants.CreateNotify: XCreateWindowEvent cr = ev.get_xcreatewindow(); if (xembedLog.isLoggable(PlatformLogger.Level.FINEST)) { xembedLog.finest("Message on embedder: " + cr); } if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Create notify for parent " + Long.toHexString(cr.get_parent()) + ", window " + Long.toHexString(cr.get_window())); } embedChild(cr.get_window()); break; case XConstants.DestroyNotify: XDestroyWindowEvent dn = ev.get_xdestroywindow(); if (xembedLog.isLoggable(PlatformLogger.Level.FINEST)) { xembedLog.finest("Message on embedder: " + dn); } if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Destroy notify for parent: 
" + dn); } childDestroyed(); break; case XConstants.ReparentNotify: XReparentEvent rep = ev.get_xreparent(); if (xembedLog.isLoggable(PlatformLogger.Level.FINEST)) { xembedLog.finest("Message on embedder: " + rep); } if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Reparent notify for parent " + Long.toHexString(rep.get_parent()) + ", window " + Long.toHexString(rep.get_window()) + ", event " + Long.toHexString(rep.get_event())); } if (rep.get_parent() == getWindow()) { // Reparented into us - embed it embedChild(rep.get_window()); } else { // Reparented out of us - detach it childDestroyed(); } break; } } public Dimension getPreferredSize() { if (isXEmbedActive()) { XToolkit.awtLock(); try { long p_hints = XlibWrapper.XAllocSizeHints(); XSizeHints hints = new XSizeHints(p_hints); XlibWrapper.XGetWMNormalHints(XToolkit.getDisplay(), xembed.handle, p_hints, XlibWrapper.larg1); Dimension res = new Dimension(hints.get_width(), hints.get_height()); XlibWrapper.XFree(p_hints); return res; } finally { XToolkit.awtUnlock(); } } else { return super.getPreferredSize(); } } public Dimension getMinimumSize() { if (isXEmbedActive()) { XToolkit.awtLock(); try { long p_hints = XlibWrapper.XAllocSizeHints(); XSizeHints hints = new XSizeHints(p_hints); XlibWrapper.XGetWMNormalHints(XToolkit.getDisplay(), xembed.handle, p_hints, XlibWrapper.larg1); Dimension res = new Dimension(hints.get_min_width(), hints.get_min_height()); XlibWrapper.XFree(p_hints); return res; } finally { XToolkit.awtUnlock(); } } else { return super.getMinimumSize(); } } public void dispose() { if (isXEmbedActive()) { detachChild(); } deinstallActivateListener(); deinstallModalityListener(); deinstallAcceleratorListener(); // BUG: Focus traversal doesn't become enabled after the one round of embedding //target.setFocusTraversalKeysEnabled(true); super.dispose(); } // Focusable is true in order to enable focus traversal through this Canvas public boolean isFocusable() { return true; } 
Window getTopLevel(Component comp) { while (comp != null && !(comp instanceof Window)) { comp = comp.getParent(); } return (Window)comp; } Rectangle getClientBounds() { XToolkit.awtLock(); try { XWindowAttributes wattr = new XWindowAttributes(); try { XErrorHandlerUtil.WITH_XERROR_HANDLER(XErrorHandler.IgnoreBadWindowHandler.getInstance()); int status = XlibWrapper.XGetWindowAttributes(XToolkit.getDisplay(), xembed.handle, wattr.pData); XErrorHandlerUtil.RESTORE_XERROR_HANDLER(); if ((status == 0) || ((XErrorHandlerUtil.saved_error != null) && (XErrorHandlerUtil.saved_error.get_error_code() != XConstants.Success))) { return null; } return new Rectangle(wattr.get_x(), wattr.get_y(), wattr.get_width(), wattr.get_height()); } finally { wattr.dispose(); } } finally { XToolkit.awtUnlock(); } } void childResized() { if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { Rectangle bounds = getClientBounds(); xembedLog.finer("Child resized: " + bounds); // It is not required to update embedder's size when client size changes // However, since there is no any means to get client size it seems to be the // only way to provide it. However, it contradicts with Java layout concept - // so it is disabled for now. 
// Rectangle my_bounds = getBounds(); // setBounds(my_bounds.x, my_bounds.y, bounds.width, bounds.height, SET_BOUNDS); } XToolkit.postEvent(XToolkit.targetToAppContext(target), new ComponentEvent(target, ComponentEvent.COMPONENT_RESIZED)); } void focusNext() { if (isXEmbedActive()) { xembedLog.fine("Requesting focus for the next component after embedder"); postEvent(new InvocationEvent(target, new Runnable() { public void run() { KeyboardFocusManager.getCurrentKeyboardFocusManager().focusNextComponent(target); } })); } else { xembedLog.fine("XEmbed is not active - denying focus next"); } } void focusPrev() { if (isXEmbedActive()) { xembedLog.fine("Requesting focus for the next component after embedder"); postEvent(new InvocationEvent(target, new Runnable() { public void run() { KeyboardFocusManager.getCurrentKeyboardFocusManager().focusPreviousComponent(target); } })); } else { xembedLog.fine("XEmbed is not active - denying focus prev"); } } void requestXEmbedFocus() { if (isXEmbedActive()) { xembedLog.fine("Requesting focus for client"); postEvent(new InvocationEvent(target, new Runnable() { public void run() { target.requestFocus(); } })); } else { xembedLog.fine("XEmbed is not active - denying request focus"); } } void notifyChildEmbedded() { xembed.sendMessage(xembed.handle, XEMBED_EMBEDDED_NOTIFY, getWindow(), Math.min(xembed.version, XEMBED_VERSION), 0); if (isApplicationActive()) { xembedLog.fine("Sending WINDOW_ACTIVATE during initialization"); xembed.sendMessage(xembed.handle, XEMBED_WINDOW_ACTIVATE); if (hasFocus()) { xembedLog.fine("Sending FOCUS_GAINED during initialization"); xembed.sendMessage(xembed.handle, XEMBED_FOCUS_IN, XEMBED_FOCUS_CURRENT, 0, 0); } } } void detachChild() { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Detaching child " + Long.toHexString(xembed.handle)); } /** * XEmbed specification: * "The embedder can unmap the client and reparent the client window to the root window. 
If the * client receives an ReparentNotify event, it should check the parent field of the XReparentEvent * structure. If this is the root window of the window's screen, then the protocol is finished and * there is no further interaction. If it is a window other than the root window, then the protocol * continues with the new parent acting as the embedder window." */ XToolkit.awtLock(); try { XlibWrapper.XUnmapWindow(XToolkit.getDisplay(), xembed.handle); XlibWrapper.XReparentWindow(XToolkit.getDisplay(), xembed.handle, XToolkit.getDefaultRootWindow(), 0, 0); } finally { XToolkit.awtUnlock(); } endDispatching(); xembed.handle = 0; } public void windowGainedFocus(WindowEvent e) { applicationActive = true; if (isXEmbedActive()) { xembedLog.fine("Sending WINDOW_ACTIVATE"); xembed.sendMessage(xembed.handle, XEMBED_WINDOW_ACTIVATE); } } public void windowLostFocus(WindowEvent e) { applicationActive = false; if (isXEmbedActive()) { xembedLog.fine("Sending WINDOW_DEACTIVATE"); xembed.sendMessage(xembed.handle, XEMBED_WINDOW_DEACTIVATE); } } void canvasFocusGained(FocusEvent e) { if (isXEmbedActive()) { xembedLog.fine("Forwarding FOCUS_GAINED"); int flavor = XEMBED_FOCUS_CURRENT; if (e instanceof CausedFocusEvent) { CausedFocusEvent ce = (CausedFocusEvent)e; if (ce.getCause() == CausedFocusEvent.Cause.TRAVERSAL_FORWARD) { flavor = XEMBED_FOCUS_FIRST; } else if (ce.getCause() == CausedFocusEvent.Cause.TRAVERSAL_BACKWARD) { flavor = XEMBED_FOCUS_LAST; } } xembed.sendMessage(xembed.handle, XEMBED_FOCUS_IN, flavor, 0, 0); } } void canvasFocusLost(FocusEvent e) { if (isXEmbedActive() && !e.isTemporary()) { xembedLog.fine("Forwarding FOCUS_LOST"); int num = 0; if (AccessController.doPrivileged(new GetBooleanAction("sun.awt.xembed.testing"))) { Component opp = e.getOppositeComponent(); try { num = Integer.parseInt(opp.getName()); } catch (NumberFormatException nfe) { } } xembed.sendMessage(xembed.handle, XEMBED_FOCUS_OUT, num, 0, 0); } } static byte[] getBData(KeyEvent e) { return 
AWTAccessor.getAWTEventAccessor().getBData(e); } void forwardKeyEvent(KeyEvent e) { xembedLog.fine("Try to forward key event"); byte[] bdata = getBData(e); long data = Native.toData(bdata); if (data == 0) { return; } try { XKeyEvent ke = new XKeyEvent(data); ke.set_window(xembed.handle); if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Forwarding native key event: " + ke); } XToolkit.awtLock(); try { XlibWrapper.XSendEvent(XToolkit.getDisplay(), xembed.handle, false, XConstants.NoEventMask, data); } finally { XToolkit.awtUnlock(); } } finally { XlibWrapper.unsafe.freeMemory(data); } } /** * Grab/ungrab key functionality is an unofficial API supported by * GTK. Unfortunately, it doesn't support accelerator API, so, * since this is the ONLY shortcut-processing API available, we * must support it. See XEmbed.NON_STANDARD_XEMBED_GTK_* * messages. The format of these messages is as follows: * - request from client: * data[1] = NON_STANDARD_XEMBED_GTK_GRAB_KEY or NON_STANDARD_XEMBED_GTK_UNGRAB_KEY * data[3] = X keysym * data[4] = X modifiers * * - response from server (in case the grabbed key has been pressed): * forwarded XKeyEvent that matches keysym/modifiers pair */ void grabKey(final long keysym, final long modifiers) { postEvent(new InvocationEvent(target, new Runnable() { public void run() { GrabbedKey grab = new GrabbedKey(keysym, modifiers); if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Grabbing key: " + grab); } synchronized(GRAB_LOCK) { grabbed_keys.add(grab); } } })); } void ungrabKey(final long keysym, final long modifiers) { postEvent(new InvocationEvent(target, new Runnable() { public void run() { GrabbedKey grab = new GrabbedKey(keysym, modifiers); if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("UnGrabbing key: " + grab); } synchronized(GRAB_LOCK) { grabbed_keys.remove(grab); } } })); } void registerAccelerator(final long accel_id, final long keysym, final long modifiers) { 
postEvent(new InvocationEvent(target, new Runnable() { public void run() { AWTKeyStroke stroke = xembed.getKeyStrokeForKeySym(keysym, modifiers); if (stroke != null) { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Registering accelerator " + accel_id + " for " + stroke); } synchronized(ACCEL_LOCK) { accelerators.put(accel_id, stroke); accel_lookup.put(stroke, accel_id); } } propogateRegisterAccelerator(stroke); } })); } void unregisterAccelerator(final long accel_id) { postEvent(new InvocationEvent(target, new Runnable() { public void run() { AWTKeyStroke stroke = null; synchronized(ACCEL_LOCK) { stroke = accelerators.get(accel_id); if (stroke != null) { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Unregistering accelerator: " + accel_id); } accelerators.remove(accel_id); accel_lookup.remove(stroke); // FIXME: How about several accelerators with the same stroke? } } propogateUnRegisterAccelerator(stroke); } })); } void propogateRegisterAccelerator(AWTKeyStroke stroke) { // Find the top-level and see if it is XEmbed client. If so, ask him to // register the accelerator XWindowPeer parent = getToplevelXWindow(); if (parent != null && parent instanceof XEmbeddedFramePeer) { XEmbeddedFramePeer embedded = (XEmbeddedFramePeer)parent; embedded.registerAccelerator(stroke); } } void propogateUnRegisterAccelerator(AWTKeyStroke stroke) { // Find the top-level and see if it is XEmbed client. 
If so, ask him to // register the accelerator XWindowPeer parent = getToplevelXWindow(); if (parent != null && parent instanceof XEmbeddedFramePeer) { XEmbeddedFramePeer embedded = (XEmbeddedFramePeer)parent; embedded.unregisterAccelerator(stroke); } } public boolean postProcessKeyEvent(KeyEvent e) { // Processing events only if we are in the focused window but // we are not focus owner since otherwise we will get // duplicate shortcut events in the client - one is from // activate_accelerator, another from forwarded event // FIXME: This is probably an incompatibility, protocol // doesn't say anything about disable accelerators when client // is focused. XWindowPeer parent = getToplevelXWindow(); if (parent == null || !((Window)parent.getTarget()).isFocused() || target.isFocusOwner()) { return false; } boolean result = false; if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Post-processing event " + e); } // Process ACCELERATORS AWTKeyStroke stroke = AWTKeyStroke.getAWTKeyStrokeForEvent(e); long accel_id = 0; boolean exists = false; synchronized(ACCEL_LOCK) { exists = accel_lookup.containsKey(stroke); if (exists) { accel_id = accel_lookup.get(stroke).longValue(); } } if (exists) { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Activating accelerator " + accel_id); } xembed.sendMessage(xembed.handle, XEMBED_ACTIVATE_ACCELERATOR, accel_id, 0, 0); // FIXME: How about overloaded? 
result = true; } // Process Grabs, unofficial GTK feature exists = false; GrabbedKey key = new GrabbedKey(e); synchronized(GRAB_LOCK) { exists = grabbed_keys.contains(key); } if (exists) { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine("Forwarding grabbed key " + e); } forwardKeyEvent(e); result = true; } return result; } public void modalityPushed(ModalityEvent ev) { xembed.sendMessage(xembed.handle, XEMBED_MODALITY_ON); } public void modalityPopped(ModalityEvent ev) { xembed.sendMessage(xembed.handle, XEMBED_MODALITY_OFF); } public void handleClientMessage(XEvent xev) { super.handleClientMessage(xev); XClientMessageEvent msg = xev.get_xclient(); if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Client message to embedder: " + msg); } if (msg.get_message_type() == xembed.XEmbed.getAtom()) { if (xembedLog.isLoggable(PlatformLogger.Level.FINE)) { xembedLog.fine(xembed.XEmbedMessageToString(msg)); } } if (isXEmbedActive()) { switch ((int)msg.get_data(1)) { case XEMBED_REQUEST_FOCUS: requestXEmbedFocus(); break; case XEMBED_FOCUS_NEXT: focusNext(); break; case XEMBED_FOCUS_PREV: focusPrev(); break; case XEMBED_REGISTER_ACCELERATOR: registerAccelerator(msg.get_data(2), msg.get_data(3), msg.get_data(4)); break; case XEMBED_UNREGISTER_ACCELERATOR: unregisterAccelerator(msg.get_data(2)); break; case NON_STANDARD_XEMBED_GTK_GRAB_KEY: grabKey(msg.get_data(3), msg.get_data(4)); break; case NON_STANDARD_XEMBED_GTK_UNGRAB_KEY: ungrabKey(msg.get_data(3), msg.get_data(4)); break; } } else { xembedLog.finer("But XEmbed is not Active!"); } } private static class XEmbedDropTarget extends DropTarget { public void addDropTargetListener(DropTargetListener dtl) throws TooManyListenersException { // Drop target listeners registered with this target will never be // notified, since all drag notifications are routed to the XEmbed // client. 
To avoid confusion we prohibit listeners registration // by throwing TooManyListenersException as if there is a listener // registered with this target already. throw new TooManyListenersException(); } } public void setXEmbedDropTarget() { // Register a drop site on the top level. Runnable r = new Runnable() { public void run() { target.setDropTarget(new XEmbedDropTarget()); } }; SunToolkit.executeOnEventHandlerThread(target, r); } public void removeXEmbedDropTarget() { // Unregister a drop site on the top level. Runnable r = new Runnable() { public void run() { if (target.getDropTarget() instanceof XEmbedDropTarget) { target.setDropTarget(null); } } }; SunToolkit.executeOnEventHandlerThread(target, r); } public boolean processXEmbedDnDEvent(long ctxt, int eventID) { if (xembedLog.isLoggable(PlatformLogger.Level.FINEST)) { xembedLog.finest(" Drop target=" + target.getDropTarget()); } if (target.getDropTarget() instanceof XEmbedDropTarget) { AppContext appContext = XToolkit.targetToAppContext(getTarget()); XDropTargetContextPeer peer = XDropTargetContextPeer.getPeer(appContext); peer.forwardEventToEmbedded(xembed.handle, ctxt, eventID); return true; } else { return false; } } class XEmbedServer extends XEmbedHelper implements XEventDispatcher { long handle; // Handle to XEmbed client long version; long flags; boolean processXEmbedInfo() { long xembed_info_data = Native.allocateLongArray(2); try { if (!XEmbedInfo.getAtomData(handle, xembed_info_data, 2)) { // No more XEMBED_INFO? This is not XEmbed client! 
// Unfortunately this is the initial state of the most clients // FIXME: add 5-state processing //childDestroyed(); xembedLog.finer("Unable to get XEMBED_INFO atom data"); return false; } version = Native.getCard32(xembed_info_data, 0); flags = Native.getCard32(xembed_info_data, 1); boolean new_mapped = (flags & XEMBED_MAPPED) != 0; boolean currently_mapped = XlibUtil.getWindowMapState(handle) != XConstants.IsUnmapped; if (new_mapped != currently_mapped) { if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Mapping state of the client has changed, old state: " + currently_mapped + ", new state: " + new_mapped); } if (new_mapped) { XToolkit.awtLock(); try { XlibWrapper.XMapWindow(XToolkit.getDisplay(), handle); } finally { XToolkit.awtUnlock(); } } else { XToolkit.awtLock(); try { XlibWrapper.XUnmapWindow(XToolkit.getDisplay(), handle); } finally { XToolkit.awtUnlock(); } } } else { if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Mapping state didn't change, mapped: " + currently_mapped); } } return true; } finally { XlibWrapper.unsafe.freeMemory(xembed_info_data); } } public void handlePropertyNotify(XEvent xev) { if (isXEmbedActive()) { XPropertyEvent ev = xev.get_xproperty(); if (xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Property change on client: " + ev); } if (ev.get_atom() == XAtom.XA_WM_NORMAL_HINTS) { childResized(); } else if (ev.get_atom() == XEmbedInfo.getAtom()) { processXEmbedInfo(); } else if (ev.get_atom() == XDnDConstants.XA_XdndAware.getAtom()) { XDropTargetRegistry.getRegistry().unregisterXEmbedClient(getWindow(), xembed.handle); if (ev.get_state() == XConstants.PropertyNewValue) { XDropTargetRegistry.getRegistry().registerXEmbedClient(getWindow(), xembed.handle); } } } else { xembedLog.finer("XEmbed is not active"); } } void handleConfigureNotify(XEvent xev) { if (isXEmbedActive()) { XConfigureEvent ev = xev.get_xconfigure(); if 
(xembedLog.isLoggable(PlatformLogger.Level.FINER)) { xembedLog.finer("Bounds change on client: " + ev); } if (xev.get_xany().get_window() == handle) { childResized(); } } } public void dispatchEvent(XEvent xev) { int type = xev.get_type(); switch (type) { case XConstants.PropertyNotify: handlePropertyNotify(xev); break; case XConstants.ConfigureNotify: handleConfigureNotify(xev); break; case XConstants.ClientMessage: handleClientMessage(xev); break; } } } static class GrabbedKey { long keysym; long modifiers; GrabbedKey(long keysym, long modifiers) { this.keysym = keysym; this.modifiers = modifiers; } GrabbedKey(KeyEvent ev) { init(ev); } private void init(KeyEvent e) { byte[] bdata = getBData(e); long data = Native.toData(bdata); if (data == 0) { return; } try { XToolkit.awtLock(); try { keysym = XWindow.getKeySymForAWTKeyCode(e.getKeyCode()); } finally { XToolkit.awtUnlock(); } XKeyEvent ke = new XKeyEvent(data); // We recognize only these masks modifiers = ke.get_state() & (XConstants.ShiftMask | XConstants.ControlMask | XConstants.LockMask); if (xembedLog.isLoggable(PlatformLogger.Level.FINEST)) { xembedLog.finest("Mapped " + e + " to " + this); } } finally { XlibWrapper.unsafe.freeMemory(data); } } public int hashCode() { return (int)keysym & 0xFFFFFFFF; } public boolean equals(Object o) { if (!(o instanceof GrabbedKey)) { return false; } GrabbedKey key = (GrabbedKey)o; return (keysym == key.keysym && modifiers == key.modifiers); } public String toString() { return "Key combination[keysym=" + keysym + ", mods=" + modifiers + "]"; } } }
apache/flink
35,250
flink-runtime/src/test/java/org/apache/flink/runtime/metrics/MetricRegistryImplTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.metrics; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.EventOptions; import org.apache.flink.configuration.MetricOptions; import org.apache.flink.configuration.TraceOptions; import org.apache.flink.events.Event; import org.apache.flink.events.EventBuilder; import org.apache.flink.events.reporter.EventReporter; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.Metric; import org.apache.flink.metrics.MetricConfig; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.metrics.SimpleCounter; import org.apache.flink.metrics.reporter.Scheduled; import org.apache.flink.metrics.util.TestCounter; import org.apache.flink.metrics.util.TestMeter; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.concurrent.ManuallyTriggeredScheduledExecutorService; import org.apache.flink.runtime.metrics.CollectingMetricsReporter.MetricGroupAndName; import org.apache.flink.runtime.metrics.dump.MetricDumpSerialization; import org.apache.flink.runtime.metrics.dump.MetricQueryService; import org.apache.flink.runtime.metrics.filter.DefaultReporterFilters; import 
org.apache.flink.runtime.metrics.groups.MetricGroupTest; import org.apache.flink.runtime.metrics.groups.TaskManagerMetricGroup; import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups; import org.apache.flink.runtime.metrics.scope.ScopeFormats; import org.apache.flink.runtime.metrics.util.TestEventReporter; import org.apache.flink.runtime.metrics.util.TestReporter; import org.apache.flink.runtime.metrics.util.TestTraceReporter; import org.apache.flink.runtime.rpc.RpcService; import org.apache.flink.runtime.rpc.TestingRpcService; import org.apache.flink.runtime.webmonitor.retriever.MetricQueryServiceGateway; import org.apache.flink.traces.Span; import org.apache.flink.traces.SpanBuilder; import org.apache.flink.traces.reporter.TraceReporter; import org.apache.flink.shaded.guava33.com.google.common.collect.Iterators; import org.junit.jupiter.api.Test; import javax.annotation.Nullable; import java.time.Duration; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import static org.apache.flink.util.Preconditions.checkNotNull; import static org.assertj.core.api.Assertions.assertThat; /** Tests for the {@link MetricRegistryImpl}. 
*/ class MetricRegistryImplTest { private static final char GLOBAL_DEFAULT_DELIMITER = '.'; @Test void testIsShutdown() throws Exception { MetricRegistryImpl metricRegistry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration()); assertThat(metricRegistry.isShutdown()).isFalse(); metricRegistry.closeAsync().get(); assertThat(metricRegistry.isShutdown()).isTrue(); } @Test void testMetricQueryServiceSetup() throws Exception { MetricRegistryImpl metricRegistry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration()); assertThat(metricRegistry.getMetricQueryServiceGatewayRpcAddress()).isNull(); metricRegistry.startQueryService(new TestingRpcService(), new ResourceID("mqs")); MetricQueryServiceGateway metricQueryServiceGateway = metricRegistry.getMetricQueryServiceGateway(); assertThat(metricQueryServiceGateway).isNotNull(); metricRegistry.register( new SimpleCounter(), "counter", UnregisteredMetricGroups.createUnregisteredTaskManagerMetricGroup()); boolean metricsSuccessfullyQueried = false; for (int x = 0; x < 10; x++) { MetricDumpSerialization.MetricSerializationResult metricSerializationResult = metricQueryServiceGateway .queryMetrics(Duration.ofSeconds(5)) .get(5, TimeUnit.SECONDS); if (metricSerializationResult.numCounters == 1) { metricsSuccessfullyQueried = true; } else { Thread.sleep(50); } } assertThat(metricsSuccessfullyQueried) .as("metrics query did not return expected result") .isTrue(); } /** * Verifies that reporters implementing the Scheduled interface are regularly called to report * the metrics. 
*/ @Test void testReporterScheduling() throws Exception { MetricConfig config = new MetricConfig(); config.setProperty("arg1", "hello"); config.setProperty(MetricOptions.REPORTER_INTERVAL.key(), "50 MILLISECONDS"); final ReportCountingReporter reporter = new ReportCountingReporter(); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Collections.singletonList( ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test", config, reporter))); long start = System.currentTimeMillis(); // only start counting from now on reporter.resetCount(); for (int x = 0; x < 10; x++) { Thread.sleep(100); int reportCount = reporter.getReportCount(); long curT = System.currentTimeMillis(); /** * Within a given time-frame T only T/500 reports may be triggered due to the interval * between reports. This value however does not not take the first triggered report into * account (=> +1). Furthermore we have to account for the mis-alignment between reports * being triggered and our time measurement (=> +1); for T=200 a total of 4-6 reports * may have been triggered depending on whether the end of the interval for the first * reports ends before or after T=50. 
*/ long maxAllowedReports = (curT - start) / 50 + 2; assertThat(maxAllowedReports) .as("Too many reports were triggered.") .isGreaterThanOrEqualTo(reportCount); } assertThat(reporter.getReportCount()).as("No report was triggered.").isGreaterThan(0); registry.closeAsync().get(); } @Test void testReporterIntervalParsingErrorFallsBackToDefaultValue() throws Exception { MetricConfig config = new MetricConfig(); // in a prior implementation the time amount was applied even if the time unit was invalid // in this case this would imply using 1 SECOND as the interval (seconds is the default) config.setProperty(MetricOptions.REPORTER_INTERVAL.key(), "1 UNICORN"); final ManuallyTriggeredScheduledExecutorService manuallyTriggeredScheduledExecutorService = new ManuallyTriggeredScheduledExecutorService(); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Collections.singletonList( ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test", config, new ReportCountingReporter())), manuallyTriggeredScheduledExecutorService); try { Collection<ScheduledFuture<?>> scheduledTasks = manuallyTriggeredScheduledExecutorService.getActiveScheduledTasks(); ScheduledFuture<?> reportTask = Iterators.getOnlyElement(scheduledTasks.iterator()); assertThat(reportTask.getDelay(TimeUnit.SECONDS)) .isEqualTo(MetricOptions.REPORTER_INTERVAL.defaultValue().getSeconds()); } finally { registry.closeAsync().get(); } } /** Reporter that exposes how often report() was called. */ private static class ReportCountingReporter extends TestReporter implements Scheduled { private int reportCount = 0; @Override public void report() { reportCount++; } public int getReportCount() { return reportCount; } public void resetCount() { reportCount = 0; } } /** Verifies that reporters are notified of added/removed metrics. 
*/ @Test void testReporterNotifications() throws Exception { final NotificationCapturingMetricReporter reporter1 = new NotificationCapturingMetricReporter(); final NotificationCapturingMetricReporter reporter2 = new NotificationCapturingMetricReporter(); final NotificationCapturingEventReporter eventReporter1 = new NotificationCapturingEventReporter(); final NotificationCapturingEventReporter eventReporter2 = new NotificationCapturingEventReporter(); final NotificationCapturingSpanReporter spanReporter1 = new NotificationCapturingSpanReporter(); final NotificationCapturingSpanReporter spanReporter2 = new NotificationCapturingSpanReporter(); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Arrays.asList( ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test1", reporter1), ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test2", reporter2)), Arrays.asList( ReporterSetupBuilder.TRACE_SETUP_BUILDER.forReporter( "trace_test1", spanReporter1), ReporterSetupBuilder.TRACE_SETUP_BUILDER.forReporter( "trace_test2", spanReporter2)), Arrays.asList( ReporterSetupBuilder.EVENT_SETUP_BUILDER.forReporter( "event_test1", eventReporter1), ReporterSetupBuilder.EVENT_SETUP_BUILDER.forReporter( "event_test2", eventReporter2))); TaskManagerMetricGroup root = TaskManagerMetricGroup.createTaskManagerMetricGroup( registry, "host", new ResourceID("id")); root.counter("rootCounter"); root.addEvent(Event.builder(getClass(), "TestEvent")); root.addSpan(Span.builder(getClass(), "TestSpan")); assertThat(reporter1.getLastAddedMetric()).containsInstanceOf(Counter.class); assertThat(reporter1.getLastAddedMetricName()).hasValue("rootCounter"); assertThat(eventReporter1.getLastAddedEvent().map(Event::getName)).hasValue("TestEvent"); assertThat(spanReporter1.getLastAddedSpan().map(Span::getName)).hasValue("TestSpan"); assertThat(reporter2.getLastAddedMetric()).containsInstanceOf(Counter.class); 
assertThat(reporter2.getLastAddedMetricName()).hasValue("rootCounter"); assertThat(eventReporter2.getLastAddedEvent().map(Event::getName)).hasValue("TestEvent"); assertThat(spanReporter2.getLastAddedSpan().map(Span::getName)).hasValue("TestSpan"); root.close(); assertThat(reporter1.getLastRemovedMetric()).containsInstanceOf(Counter.class); assertThat(reporter1.getLastRemovedMetricName()).hasValue("rootCounter"); assertThat(reporter2.getLastRemovedMetric()).containsInstanceOf(Counter.class); assertThat(reporter2.getLastRemovedMetricName()).hasValue("rootCounter"); registry.closeAsync().get(); } /** * Reporter that exposes the name and metric instance of the last metric that was added or * removed. */ private static class NotificationCapturingMetricReporter extends TestReporter { @Nullable private Metric addedMetric; @Nullable private String addedMetricName; @Nullable private Metric removedMetric; @Nullable private String removedMetricName; @Override public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) { addedMetric = metric; addedMetricName = metricName; } @Override public void notifyOfRemovedMetric(Metric metric, String metricName, MetricGroup group) { removedMetric = metric; removedMetricName = metricName; } public Optional<Metric> getLastAddedMetric() { return Optional.ofNullable(addedMetric); } public Optional<String> getLastAddedMetricName() { return Optional.ofNullable(addedMetricName); } public Optional<Metric> getLastRemovedMetric() { return Optional.ofNullable(removedMetric); } public Optional<String> getLastRemovedMetricName() { return Optional.ofNullable(removedMetricName); } } private static class NotificationCapturingEventReporter extends TestEventReporter { @Nullable private Event addedEvent; @Override public void notifyOfAddedEvent(Event event) { this.addedEvent = event; } public Optional<Event> getLastAddedEvent() { return Optional.ofNullable(addedEvent); } } private static class NotificationCapturingSpanReporter 
extends TestTraceReporter { @Nullable private Span addedSpan; @Override public void notifyOfAddedSpan(Span span) { this.addedSpan = span; } public Optional<Span> getLastAddedSpan() { return Optional.ofNullable(addedSpan); } } /** Verifies that the scope configuration is properly extracted. */ @Test void testScopeConfig() { Configuration config = new Configuration(); config.set(MetricOptions.SCOPE_NAMING_TM, "A"); config.set(MetricOptions.SCOPE_NAMING_TM_JOB, "B"); config.set(MetricOptions.SCOPE_NAMING_TASK, "C"); config.set(MetricOptions.SCOPE_NAMING_OPERATOR, "D"); ScopeFormats scopeConfig = ScopeFormats.fromConfig(config); assertThat(scopeConfig.getTaskManagerFormat().format()).isEqualTo("A"); assertThat(scopeConfig.getTaskManagerJobFormat().format()).isEqualTo("B"); assertThat(scopeConfig.getTaskFormat().format()).isEqualTo("C"); assertThat(scopeConfig.getOperatorFormat().format()).isEqualTo("D"); } @Test void testConfigurableDelimiter() throws Exception { Configuration config = new Configuration(); config.set(MetricOptions.SCOPE_DELIMITER, "_"); config.set(MetricOptions.SCOPE_NAMING_TM, "A.B.C.D.E"); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.fromConfiguration(config), ReporterSetupBuilder.METRIC_SETUP_BUILDER.fromConfiguration( config, DefaultReporterFilters::metricsFromConfiguration, null)); TaskManagerMetricGroup tmGroup = TaskManagerMetricGroup.createTaskManagerMetricGroup( registry, "host", new ResourceID("id")); assertThat(tmGroup.getMetricIdentifier("name")).isEqualTo("A_B_C_D_E_name"); registry.closeAsync().get(); } @Test void testConfigurableDelimiterForReporters() throws Exception { MetricConfig config1 = new MetricConfig(); config1.setProperty(MetricOptions.REPORTER_SCOPE_DELIMITER.key(), "_"); MetricConfig config2 = new MetricConfig(); config2.setProperty(MetricOptions.REPORTER_SCOPE_DELIMITER.key(), "-"); MetricConfig config3 = new MetricConfig(); config3.setProperty(MetricOptions.REPORTER_SCOPE_DELIMITER.key(), 
"AA"); MetricConfig traceConfig1 = new MetricConfig(); traceConfig1.setProperty(TraceOptions.REPORTER_SCOPE_DELIMITER.key(), "_"); MetricConfig traceConfig2 = new MetricConfig(); traceConfig2.setProperty(TraceOptions.REPORTER_SCOPE_DELIMITER.key(), "-"); MetricConfig traceConfig3 = new MetricConfig(); traceConfig3.setProperty(TraceOptions.REPORTER_SCOPE_DELIMITER.key(), "AA"); MetricConfig eventConfig1 = new MetricConfig(); eventConfig1.setProperty(EventOptions.REPORTER_SCOPE_DELIMITER.key(), "_"); MetricConfig eventConfig2 = new MetricConfig(); eventConfig2.setProperty(EventOptions.REPORTER_SCOPE_DELIMITER.key(), "-"); MetricConfig eventConfig3 = new MetricConfig(); eventConfig3.setProperty(EventOptions.REPORTER_SCOPE_DELIMITER.key(), "AA"); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Arrays.asList( ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test1", config1, new TestReporter()), ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test2", config2, new TestReporter()), ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test3", config3, new TestReporter())), Arrays.asList( ReporterSetupBuilder.TRACE_SETUP_BUILDER.forReporter( "traceTest1", traceConfig1, new TestTraceReporter()), ReporterSetupBuilder.TRACE_SETUP_BUILDER.forReporter( "traceTest2", traceConfig2, new TestTraceReporter()), ReporterSetupBuilder.TRACE_SETUP_BUILDER.forReporter( "traceTest3", traceConfig3, new TestTraceReporter())), Arrays.asList( ReporterSetupBuilder.EVENT_SETUP_BUILDER.forReporter( "eventTest1", eventConfig1, new TestEventReporter()), ReporterSetupBuilder.EVENT_SETUP_BUILDER.forReporter( "eventTest2", eventConfig2, new TestEventReporter()), ReporterSetupBuilder.EVENT_SETUP_BUILDER.forReporter( "eventTest3", eventConfig3, new TestEventReporter()))); assertThat(registry.getDelimiter()).isEqualTo(GLOBAL_DEFAULT_DELIMITER); assertThat(registry.getDelimiter(0)).isEqualTo('_'); 
assertThat(registry.getDelimiter(1)).isEqualTo('-'); assertThat(registry.getDelimiter(2)).isEqualTo(GLOBAL_DEFAULT_DELIMITER); assertThat(registry.getDelimiter(3)).isEqualTo(GLOBAL_DEFAULT_DELIMITER); assertThat(registry.getDelimiter(-1)).isEqualTo(GLOBAL_DEFAULT_DELIMITER); List<MetricRegistryImpl.ReporterAndSettings<TraceReporter, SpanBuilder>> traceReporters = registry.getTraceReporters(); assertThat(traceReporters.get(0).getSettings().getDelimiter()).isEqualTo('_'); assertThat(traceReporters.get(1).getSettings().getDelimiter()).isEqualTo('-'); assertThat(traceReporters.get(2).getSettings().getDelimiter()) .isEqualTo(GLOBAL_DEFAULT_DELIMITER); List<MetricRegistryImpl.ReporterAndSettings<EventReporter, EventBuilder>> eventReporters = registry.getEventReporters(); assertThat(traceReporters.get(0).getSettings().getDelimiter()).isEqualTo('_'); assertThat(traceReporters.get(1).getSettings().getDelimiter()).isEqualTo('-'); assertThat(traceReporters.get(2).getSettings().getDelimiter()) .isEqualTo(GLOBAL_DEFAULT_DELIMITER); registry.closeAsync().get(); } @Test void testConfigurableDelimiterForReportersInGroup() throws Exception { String name = "C"; MetricConfig config1 = new MetricConfig(); config1.setProperty(MetricOptions.REPORTER_SCOPE_DELIMITER.key(), "_"); MetricConfig config2 = new MetricConfig(); config2.setProperty(MetricOptions.REPORTER_SCOPE_DELIMITER.key(), "-"); MetricConfig config3 = new MetricConfig(); config3.setProperty(MetricOptions.REPORTER_SCOPE_DELIMITER.key(), "AA"); Configuration config = new Configuration(); config.set(MetricOptions.SCOPE_NAMING_TM, "A.B"); List<ReporterSetup> reporterConfigurations = Arrays.asList( ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test1", config1, new CollectingMetricsReporter()), ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test2", config2, new CollectingMetricsReporter()), ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test3", config3, new CollectingMetricsReporter()), 
ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test4", new CollectingMetricsReporter())); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.fromConfiguration(config), reporterConfigurations); TaskManagerMetricGroup group = TaskManagerMetricGroup.createTaskManagerMetricGroup( registry, "host", new ResourceID("id")); group.counter(name); group.close(); registry.closeAsync().get(); for (ReporterSetup cfg : reporterConfigurations) { String delimiter = cfg.getConfiguration() .getProperty(MetricOptions.REPORTER_SCOPE_DELIMITER.key()); if (delimiter == null || delimiter.equals("AA")) { // test3 reporter: 'AA' - not correct // for test4 reporter use global delimiter delimiter = String.valueOf(GLOBAL_DEFAULT_DELIMITER); } String expected = (config.get(MetricOptions.SCOPE_NAMING_TM) + ".C").replaceAll("\\.", delimiter); CollectingMetricsReporter reporter = (CollectingMetricsReporter) cfg.getReporter(); for (MetricGroupAndName groupAndName : Arrays.asList(reporter.findAdded(name), reporter.findRemoved(name))) { assertThat(groupAndName.group.getMetricIdentifier(name)).isEqualTo(expected); assertThat(groupAndName.group.getMetricIdentifier(name, reporter)) .isEqualTo(expected); } } } /** Tests that the query actor will be stopped when the MetricRegistry is shut down. 
*/ @Test void testQueryActorShutdown() throws Exception { MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration()); final RpcService rpcService = new TestingRpcService(); registry.startQueryService(rpcService, null); MetricQueryService queryService = checkNotNull(registry.getQueryService()); registry.closeAsync().get(); queryService.getTerminationFuture().get(); } @Test void testExceptionIsolation() throws Exception { final NotificationCapturingMetricReporter reporter1 = new NotificationCapturingMetricReporter(); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Arrays.asList( ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test1", new FailingReporter()), ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test2", reporter1))); Counter metric = new SimpleCounter(); registry.register( metric, "counter", new MetricGroupTest.DummyAbstractMetricGroup(registry)); assertThat(reporter1.getLastAddedMetric()).hasValue(metric); assertThat(reporter1.getLastAddedMetricName()).hasValue("counter"); registry.unregister( metric, "counter", new MetricGroupTest.DummyAbstractMetricGroup(registry)); assertThat(reporter1.getLastRemovedMetric()).hasValue(metric); assertThat(reporter1.getLastRemovedMetricName()).hasValue("counter"); registry.closeAsync().get(); } /** Reporter that throws an exception when it is notified of an added or removed metric. 
*/ private static class FailingReporter extends TestReporter { @Override public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) { throw new RuntimeException(); } @Override public void notifyOfRemovedMetric(Metric metric, String metricName, MetricGroup group) { throw new RuntimeException(); } } @Test void testMetricFiltering() { final String excludedMetricName = "excluded"; final NotificationCapturingMetricReporter reporter = new NotificationCapturingMetricReporter(); final Configuration reporterConfig = new Configuration(); reporterConfig.set(MetricOptions.REPORTER_INCLUDES, Arrays.asList("*:*:counter")); reporterConfig.set( MetricOptions.REPORTER_EXCLUDES, Arrays.asList("*:" + excludedMetricName)); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Arrays.asList( ReporterSetupBuilder.METRIC_SETUP_BUILDER.forReporter( "test", reporter, DefaultReporterFilters.metricsFromConfiguration( reporterConfig)))); registry.register( new TestMeter(), "", new MetricGroupTest.DummyAbstractMetricGroup(registry)); assertThat(reporter.getLastAddedMetric()).isEmpty(); registry.register( new TestCounter(), excludedMetricName, new MetricGroupTest.DummyAbstractMetricGroup(registry)); assertThat(reporter.getLastAddedMetric()).isEmpty(); registry.register( new TestCounter(), "foo", new MetricGroupTest.DummyAbstractMetricGroup(registry)); assertThat(reporter.getLastAddedMetric()).isNotEmpty(); } @Test void testSpanFiltering() { final String includedGroupName = "foo"; final String excludedSpanName = "excluded"; final NotificationCapturingSpanReporter reporter = new NotificationCapturingSpanReporter(); final Configuration reporterConfig = new Configuration(); reporterConfig.set( TraceOptions.REPORTER_INCLUDES, Collections.singletonList(includedGroupName + ":*")); reporterConfig.set( TraceOptions.REPORTER_EXCLUDES, Collections.singletonList("*:" + excludedSpanName)); MetricRegistryImpl registry = 
new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Collections.emptyList(), Collections.singletonList( ReporterSetupBuilder.TRACE_SETUP_BUILDER.forReporter( "test", reporter, DefaultReporterFilters.tracesFromConfiguration( reporterConfig))), Collections.emptyList()); registry.addSpan( Span.builder(getClass(), "testSpan"), new MetricGroupTest.DummyAbstractMetricGroup(registry, "bar")); assertThat(reporter.getLastAddedSpan()).isEmpty(); registry.addSpan( Span.builder(getClass(), excludedSpanName), new MetricGroupTest.DummyAbstractMetricGroup(registry, includedGroupName)); assertThat(reporter.getLastAddedSpan()).isEmpty(); registry.addSpan( Span.builder(getClass(), "foo"), new MetricGroupTest.DummyAbstractMetricGroup(registry, includedGroupName)); assertThat(reporter.getLastAddedSpan()).isNotEmpty(); } @Test void testEventFiltering() { final String includedGroupName = "foo"; final String excludedSpanName = "excluded"; final NotificationCapturingEventReporter reporter = new NotificationCapturingEventReporter(); final Configuration reporterConfig = new Configuration(); reporterConfig.set( EventOptions.REPORTER_INCLUDES, Collections.singletonList(includedGroupName + ":*")); reporterConfig.set( EventOptions.REPORTER_EXCLUDES, Collections.singletonList("*:" + excludedSpanName)); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList( ReporterSetupBuilder.EVENT_SETUP_BUILDER.forReporter( "test", reporter, DefaultReporterFilters.eventsFromConfiguration( reporterConfig)))); registry.addEvent( Event.builder(getClass(), "testEvent"), new MetricGroupTest.DummyAbstractMetricGroup(registry, "bar")); assertThat(reporter.getLastAddedEvent()).isEmpty(); registry.addEvent( Event.builder(getClass(), excludedSpanName), new MetricGroupTest.DummyAbstractMetricGroup(registry, includedGroupName)); 
assertThat(reporter.getLastAddedEvent()).isEmpty(); registry.addEvent( Event.builder(getClass(), "foo"), new MetricGroupTest.DummyAbstractMetricGroup(registry, includedGroupName)); assertThat(reporter.getLastAddedEvent()).isNotEmpty(); } @Test void testSpanAdditionalVariables() { final NotificationCapturingSpanReporter reporter = new NotificationCapturingSpanReporter(); final Configuration reporterConfig = new Configuration(); Map<String, String> additionalVariables = Collections.singletonMap("foo", "bar"); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Collections.emptyList(), Collections.singletonList( ReporterSetupBuilder.TRACE_SETUP_BUILDER.forReporter( "test", new MetricConfig(), reporter, DefaultReporterFilters.tracesFromConfiguration( reporterConfig), additionalVariables)), Collections.emptyList()); registry.addSpan( Span.builder(getClass(), "testEvent"), new MetricGroupTest.DummyAbstractMetricGroup(registry, "testGroup")); Optional<Span> lastAddedSpan = reporter.getLastAddedSpan(); assertThat(lastAddedSpan.get().getName()).isEqualTo("testEvent"); assertThat(lastAddedSpan.get().getAttributes()) .containsExactlyInAnyOrderEntriesOf(additionalVariables); } @Test void testEventAdditionalVariables() { final NotificationCapturingEventReporter reporter = new NotificationCapturingEventReporter(); final Configuration reporterConfig = new Configuration(); Map<String, String> additionalVariables = Collections.singletonMap("foo", "bar"); MetricRegistryImpl registry = new MetricRegistryImpl( MetricRegistryTestUtils.defaultMetricRegistryConfiguration(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList( ReporterSetupBuilder.EVENT_SETUP_BUILDER.forReporter( "test", new MetricConfig(), reporter, DefaultReporterFilters.eventsFromConfiguration( reporterConfig), additionalVariables))); registry.addEvent( Event.builder(getClass(), "testEvent"), new 
MetricGroupTest.DummyAbstractMetricGroup(registry, "testGroup")); Optional<Event> lastAddedEvent = reporter.getLastAddedEvent(); assertThat(lastAddedEvent.get().getName()).isEqualTo("testEvent"); assertThat(lastAddedEvent.get().getAttributes()) .containsExactlyInAnyOrderEntriesOf(additionalVariables); } }
apache/ofbiz
35,344
framework/service/src/main/java/org/apache/ofbiz/service/ServiceUtil.java
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.apache.ofbiz.service;

import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;

import javax.servlet.http.HttpServletRequest;
import javax.transaction.Transaction;

import org.apache.ofbiz.base.config.GenericConfigException;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.UtilDateTime;
import org.apache.ofbiz.base.util.UtilGenerics;
import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.base.util.UtilProperties;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.condition.EntityCondition;
import org.apache.ofbiz.entity.condition.EntityExpr;
import org.apache.ofbiz.entity.condition.EntityOperator;
import org.apache.ofbiz.entity.transaction.GenericTransactionException;
import org.apache.ofbiz.entity.transaction.TransactionUtil;
import org.apache.ofbiz.entity.util.EntityListIterator;
import org.apache.ofbiz.entity.util.EntityQuery;
import org.apache.ofbiz.security.Security;
import org.apache.ofbiz.service.config.ServiceConfigUtil;

import com.ibm.icu.util.Calendar;

/**
 * Generic Service Utility Class.
 *
 * <p>Static helpers for building and inspecting service result maps
 * (success / failure / error), extracting and formatting service messages,
 * and a handful of job-scheduler maintenance services.  All methods are
 * stateless; the class is not instantiable.</p>
 */
public final class ServiceUtil {

    public static final String module = ServiceUtil.class.getName();
    private static final String resource = "ServiceErrorUiLabels";

    private ServiceUtil() {}

    /** A little short-cut method to check to see if a service returned an error. */
    public static boolean isError(Map<String, ? extends Object> results) {
        if (results == null || results.get(ModelService.RESPONSE_MESSAGE) == null) {
            return false;
        }
        return ModelService.RESPOND_ERROR.equals(results.get(ModelService.RESPONSE_MESSAGE));
    }

    /** Returns true if the service result carries the "fail" response code. */
    public static boolean isFailure(Map<String, ? extends Object> results) {
        if (results == null || results.get(ModelService.RESPONSE_MESSAGE) == null) {
            return false;
        }
        return ModelService.RESPOND_FAIL.equals(results.get(ModelService.RESPONSE_MESSAGE));
    }

    /** A little short-cut method to check to see if a service was successful (neither error or failed). */
    public static boolean isSuccess(Map<String, ? extends Object> results) {
        if (ServiceUtil.isError(results) || ServiceUtil.isFailure(results)) {
            return false;
        }
        return true;
    }

    /** A small routine used all over to improve code efficiency, make a result map with the message and the error response code. */
    public static Map<String, Object> returnError(String errorMessage) {
        return returnProblem(ModelService.RESPOND_ERROR, errorMessage, null, null, null);
    }

    /** A small routine used all over to improve code efficiency, make a result map with the message and the error response code. */
    public static Map<String, Object> returnError(String errorMessage, List<? extends Object> errorMessageList) {
        return returnProblem(ModelService.RESPOND_ERROR, errorMessage, errorMessageList, null, null);
    }

    /** A small routine used all over to improve code efficiency, make a result map with the message and the error response code. */
    public static Map<String, Object> returnError(List<? extends Object> errorMessageList) {
        return returnProblem(ModelService.RESPOND_ERROR, null, errorMessageList, null, null);
    }

    /** Makes a result map with the given message and the "fail" response code. */
    public static Map<String, Object> returnFailure(String errorMessage) {
        return returnProblem(ModelService.RESPOND_FAIL, errorMessage, null, null, null);
    }

    /** Makes a result map with the given messages and the "fail" response code. */
    public static Map<String, Object> returnFailure(List<? extends Object> errorMessageList) {
        return returnProblem(ModelService.RESPOND_FAIL, null, errorMessageList, null, null);
    }

    /** Makes a bare result map carrying only the "fail" response code. */
    public static Map<String, Object> returnFailure() {
        return returnProblem(ModelService.RESPOND_FAIL, null, null, null, null);
    }

    /** A small routine used all over to improve code efficiency, make a result map with the message and the error response code,
     *  also forwards any error messages from the nestedResult. */
    public static Map<String, Object> returnError(String errorMessage, List<? extends Object> errorMessageList, Map<String, ? extends Object> errorMessageMap, Map<String, ? extends Object> nestedResult) {
        return returnProblem(ModelService.RESPOND_ERROR, errorMessage, errorMessageList, errorMessageMap, nestedResult);
    }

    /**
     * Builds a service result map for the given response type.
     *
     * @param returnType      response code to store under {@link ModelService#RESPONSE_MESSAGE}
     * @param errorMessage    optional single error message
     * @param errorMessageList optional list of error messages
     * @param errorMessageMap optional map of field-keyed error messages
     * @param nestedResult    optional result of a nested service call whose error
     *                        message, message list, and message map are merged in
     * @return the assembled result map; list/map entries are only added when non-empty
     */
    public static Map<String, Object> returnProblem(String returnType, String errorMessage, List<? extends Object> errorMessageList, Map<String, ? extends Object> errorMessageMap, Map<String, ? extends Object> nestedResult) {
        Map<String, Object> result = new HashMap<String, Object>();
        result.put(ModelService.RESPONSE_MESSAGE, returnType);
        if (errorMessage != null) {
            result.put(ModelService.ERROR_MESSAGE, errorMessage);
        }

        List<Object> errorList = new LinkedList<Object>();
        if (errorMessageList != null) {
            errorList.addAll(errorMessageList);
        }

        Map<String, Object> errorMap = new HashMap<String, Object>();
        if (errorMessageMap != null) {
            errorMap.putAll(errorMessageMap);
        }

        if (nestedResult != null) {
            if (nestedResult.get(ModelService.ERROR_MESSAGE) != null) {
                errorList.add(nestedResult.get(ModelService.ERROR_MESSAGE));
            }
            if (nestedResult.get(ModelService.ERROR_MESSAGE_LIST) != null) {
                errorList.addAll(UtilGenerics.checkList(nestedResult.get(ModelService.ERROR_MESSAGE_LIST)));
            }
            if (nestedResult.get(ModelService.ERROR_MESSAGE_MAP) != null) {
                errorMap.putAll(UtilGenerics.<String, Object>checkMap(nestedResult.get(ModelService.ERROR_MESSAGE_MAP)));
            }
        }

        if (errorList.size() > 0) {
            result.put(ModelService.ERROR_MESSAGE_LIST, errorList);
        }
        if (errorMap.size() > 0) {
            result.put(ModelService.ERROR_MESSAGE_MAP, errorMap);
        }
        return result;
    }

    /** A small routine used all over to improve code efficiency, make a result map with the message and the success response code. */
    public static Map<String, Object> returnSuccess(String successMessage) {
        return returnMessage(ModelService.RESPOND_SUCCESS, successMessage);
    }

    /** A small routine used all over to improve code efficiency, make a result map with the message and the success response code. */
    public static Map<String, Object> returnSuccess() {
        return returnMessage(ModelService.RESPOND_SUCCESS, null);
    }

    /** A small routine used all over to improve code efficiency, make a result map with the message and the success response code. */
    public static Map<String, Object> returnSuccess(List<String> successMessageList) {
        Map<String, Object> result = returnMessage(ModelService.RESPOND_SUCCESS, null);
        result.put(ModelService.SUCCESS_MESSAGE_LIST, successMessageList);
        return result;
    }

    /** A small routine to make a result map with the message and the response code
     *  NOTE: This brings out some bad points to our message convention: we should be using a single message or message list
     *  and what type of message that is should be determined by the RESPONSE_MESSAGE (and there's another annoyance, it should be RESPONSE_CODE). */
    public static Map<String, Object> returnMessage(String code, String message) {
        Map<String, Object> result = new HashMap<String, Object>();
        if (code != null) result.put(ModelService.RESPONSE_MESSAGE, code);
        if (message != null) result.put(ModelService.SUCCESS_MESSAGE, message);
        return result;
    }

    /** A small routine used all over to improve code efficiency, get the partyId and does a security check
     *  <b>security check</b>: userLogin partyId must equal partyId, or must have [secEntity][secOperation] permission. */
    public static String getPartyIdCheckSecurity(GenericValue userLogin, Security security, Map<String, ? extends Object> context, Map<String, Object> result, String secEntity, String secOperation) {
        return getPartyIdCheckSecurity(userLogin, security, context, result, secEntity, secOperation, null, null);
    }

    /**
     * Resolves the partyId from the context (falling back to the userLogin's own partyId)
     * and enforces security: acting on another party requires either the
     * [secEntity][secOperation] permission or the admin [adminSecEntity][adminSecOperation]
     * permission.  On failure, the error response and message are written into
     * {@code result} and the (possibly empty) partyId is still returned.
     */
    public static String getPartyIdCheckSecurity(GenericValue userLogin, Security security, Map<String, ? extends Object> context, Map<String, Object> result, String secEntity, String secOperation, String adminSecEntity, String adminSecOperation) {
        String partyId = (String) context.get("partyId");
        Locale locale = getLocale(context);
        if (UtilValidate.isEmpty(partyId)) {
            partyId = userLogin.getString("partyId");
        }

        // partyId might be null, so check it
        if (UtilValidate.isEmpty(partyId)) {
            result.put(ModelService.RESPONSE_MESSAGE, ModelService.RESPOND_ERROR);
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.party_id_missing", locale) + ".";
            result.put(ModelService.ERROR_MESSAGE, errMsg);
            return partyId;
        }

        // <b>security check</b>: userLogin partyId must equal partyId, or must have either of the two permissions
        if (!partyId.equals(userLogin.getString("partyId"))) {
            if (!security.hasEntityPermission(secEntity, secOperation, userLogin)
                    && !(adminSecEntity != null && adminSecOperation != null && security.hasEntityPermission(adminSecEntity, adminSecOperation, userLogin))) {
                result.put(ModelService.RESPONSE_MESSAGE, ModelService.RESPOND_ERROR);
                String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.no_permission_to_operation", locale) + ".";
                result.put(ModelService.ERROR_MESSAGE, errMsg);
                return partyId;
            }
        }
        return partyId;
    }

    /** Copies error/event messages onto the request; the default message is used only when both others are empty. */
    public static void setMessages(HttpServletRequest request, String errorMessage, String eventMessage, String defaultMessage) {
        if (UtilValidate.isNotEmpty(errorMessage)) request.setAttribute("_ERROR_MESSAGE_", errorMessage);
        if (UtilValidate.isNotEmpty(eventMessage)) request.setAttribute("_EVENT_MESSAGE_", eventMessage);
        if (UtilValidate.isEmpty(errorMessage) && UtilValidate.isEmpty(eventMessage) && UtilValidate.isNotEmpty(defaultMessage)) request.setAttribute("_EVENT_MESSAGE_", defaultMessage);
    }

    /** Extracts error and success messages from a service result and stores them on the request. */
    public static void getMessages(HttpServletRequest request, Map<String, ? extends Object> result, String defaultMessage) {
        getMessages(request, result, defaultMessage, null, null, null, null, null, null);
    }

    /** Extracts error and success messages from a service result, decorates them with the given prefixes/suffixes, and stores them on the request. */
    public static void getMessages(HttpServletRequest request, Map<String, ? extends Object> result, String defaultMessage,
            String msgPrefix, String msgSuffix, String errorPrefix, String errorSuffix, String successPrefix, String successSuffix) {
        String errorMessage = ServiceUtil.makeErrorMessage(result, msgPrefix, msgSuffix, errorPrefix, errorSuffix);
        String successMessage = ServiceUtil.makeSuccessMessage(result, msgPrefix, msgSuffix, successPrefix, successSuffix);
        setMessages(request, errorMessage, successMessage, defaultMessage);
    }

    /** Concatenates the single error message and the error message list of a result into one comma-separated string. */
    public static String getErrorMessage(Map<String, ? extends Object> result) {
        StringBuilder errorMessage = new StringBuilder();

        if (result.get(ModelService.ERROR_MESSAGE) != null) errorMessage.append((String) result.get(ModelService.ERROR_MESSAGE));

        if (result.get(ModelService.ERROR_MESSAGE_LIST) != null) {
            List<? extends Object> errors = UtilGenerics.checkList(result.get(ModelService.ERROR_MESSAGE_LIST));
            for (Object message: errors) {
                // NOTE: this MUST use toString and not cast to String because it may be a MessageString object
                String curMessage = message.toString();
                if (errorMessage.length() > 0) {
                    errorMessage.append(", ");
                }
                errorMessage.append(curMessage);
            }
        }

        return errorMessage.toString();
    }

    /**
     * Formats the error message, error message list, and error message map of a
     * service result into a single string, or returns null when there is nothing
     * to report.
     */
    public static String makeErrorMessage(Map<String, ? extends Object> result, String msgPrefix, String msgSuffix, String errorPrefix, String errorSuffix) {
        if (result == null) {
            Debug.logWarning("A null result map was passed", module);
            return null;
        }
        String errorMsg = (String) result.get(ModelService.ERROR_MESSAGE);
        List<? extends Object> errorMsgList = UtilGenerics.checkList(result.get(ModelService.ERROR_MESSAGE_LIST));
        Map<String, ? extends Object> errorMsgMap = UtilGenerics.checkMap(result.get(ModelService.ERROR_MESSAGE_MAP));
        StringBuilder outMsg = new StringBuilder();

        if (errorMsg != null) {
            if (msgPrefix != null) outMsg.append(msgPrefix);
            outMsg.append(errorMsg);
            if (msgSuffix != null) outMsg.append(msgSuffix);
        }

        outMsg.append(makeMessageList(errorMsgList, msgPrefix, msgSuffix));

        if (errorMsgMap != null) {
            for (Map.Entry<String, ? extends Object> entry: errorMsgMap.entrySet()) {
                // FIX: guard prefix/suffix against null, consistent with the branches above;
                // StringBuilder.append((String) null) would otherwise emit the literal "null"
                if (msgPrefix != null) outMsg.append(msgPrefix);
                outMsg.append(entry.getKey());
                outMsg.append(": ");
                outMsg.append(entry.getValue());
                if (msgSuffix != null) outMsg.append(msgSuffix);
            }
        }

        if (outMsg.length() > 0) {
            StringBuilder strBuf = new StringBuilder();
            if (errorPrefix != null) strBuf.append(errorPrefix);
            strBuf.append(outMsg.toString());
            if (errorSuffix != null) strBuf.append(errorSuffix);
            return strBuf.toString();
        } else {
            return null;
        }
    }

    /**
     * Formats the success message and success message list of a service result
     * into a single string; returns "" for a null result and null when there is
     * nothing to report (preserved historical behavior).
     */
    public static String makeSuccessMessage(Map<String, ? extends Object> result, String msgPrefix, String msgSuffix, String successPrefix, String successSuffix) {
        if (result == null) {
            return "";
        }
        String successMsg = (String) result.get(ModelService.SUCCESS_MESSAGE);
        List<? extends Object> successMsgList = UtilGenerics.checkList(result.get(ModelService.SUCCESS_MESSAGE_LIST));
        StringBuilder outMsg = new StringBuilder();

        outMsg.append(makeMessageList(successMsgList, msgPrefix, msgSuffix));

        if (successMsg != null) {
            if (msgPrefix != null) outMsg.append(msgPrefix);
            outMsg.append(successMsg);
            if (msgSuffix != null) outMsg.append(msgSuffix);
        }

        if (outMsg.length() > 0) {
            StringBuilder strBuf = new StringBuilder();
            if (successPrefix != null) strBuf.append(successPrefix);
            strBuf.append(outMsg.toString());
            if (successSuffix != null) strBuf.append(successSuffix);
            return strBuf.toString();
        } else {
            return null;
        }
    }

    /** Joins a list of message objects (via toString) into one string, each wrapped in the optional prefix/suffix. */
    public static String makeMessageList(List<? extends Object> msgList, String msgPrefix, String msgSuffix) {
        StringBuilder outMsg = new StringBuilder();
        if (UtilValidate.isNotEmpty(msgList)) {
            for (Object msg: msgList) {
                if (msg == null) continue;
                String curMsg = msg.toString();
                if (msgPrefix != null) outMsg.append(msgPrefix);
                outMsg.append(curMsg);
                if (msgSuffix != null) outMsg.append(msgSuffix);
            }
        }
        return outMsg.toString();
    }

    /**
     * Takes the result of an invocation and extracts any error messages
     * and adds them to the targetList or targetMap. This will handle both List and String
     * error messags.
     *
     * @param targetList  The List to add the error messages to
     * @param targetMap The Map to add any Map error messages to
     * @param callResult The result from an invocation
     */
    public static void addErrors(List<String> targetList, Map<String, Object> targetMap, Map<String, ? extends Object> callResult) {
        List<String> newList;
        Map<String, Object> errorMsgMap;

        //See if there is a single message
        if (callResult.containsKey(ModelService.ERROR_MESSAGE)) {
            targetList.add((String) callResult.get(ModelService.ERROR_MESSAGE));
        }

        //See if there is a message list
        if (callResult.containsKey(ModelService.ERROR_MESSAGE_LIST)) {
            newList = UtilGenerics.checkList(callResult.get(ModelService.ERROR_MESSAGE_LIST));
            targetList.addAll(newList);
        }

        //See if there are an error message map
        if (callResult.containsKey(ModelService.ERROR_MESSAGE_MAP)) {
            errorMsgMap = UtilGenerics.checkMap(callResult.get(ModelService.ERROR_MESSAGE_MAP));
            targetMap.putAll(errorMsgMap);
        }
    }

    /**
     * Obsolete maintenance service: purges finished/cancelled JobSandbox records
     * older than the configured purge window, then deletes RuntimeData rows that
     * no longer have any referencing JobSandbox record.  Runs each delete batch in
     * its own transaction, suspending any current transaction for the duration.
     */
    public static Map<String, Object> purgeOldJobs(DispatchContext dctx, Map<String, ? extends Object> context) {
        Locale locale = (Locale)context.get("locale");
        Debug.logWarning("purgeOldJobs service invoked. This service is obsolete - the Job Scheduler will purge old jobs automatically.", module);
        String sendPool = null;
        Calendar cal = Calendar.getInstance();
        try {
            sendPool = ServiceConfigUtil.getServiceEngine().getThreadPool().getSendToPool();
            int daysToKeep = ServiceConfigUtil.getServiceEngine().getThreadPool().getPurgeJobDays();
            cal.add(Calendar.DAY_OF_YEAR, -daysToKeep);
        } catch (GenericConfigException e) {
            Debug.logWarning(e, "Exception thrown while getting service configuration: ", module);
            return returnError(UtilProperties.getMessage(ServiceUtil.resource, "ServiceExceptionThrownWhileGettingServiceConfiguration", UtilMisc.toMap("errorString", e), locale));
        }
        Delegator delegator = dctx.getDelegator();

        Timestamp purgeTime = new Timestamp(cal.getTimeInMillis());

        // create the conditions to query
        EntityCondition pool = EntityCondition.makeCondition("poolId", sendPool);

        List<EntityExpr> finExp = UtilMisc.toList(EntityCondition.makeCondition("finishDateTime", EntityOperator.NOT_EQUAL, null));
        finExp.add(EntityCondition.makeCondition("finishDateTime", EntityOperator.LESS_THAN, purgeTime));

        List<EntityExpr> canExp = UtilMisc.toList(EntityCondition.makeCondition("cancelDateTime", EntityOperator.NOT_EQUAL, null));
        canExp.add(EntityCondition.makeCondition("cancelDateTime", EntityOperator.LESS_THAN, purgeTime));

        EntityCondition cancelled = EntityCondition.makeCondition(canExp);
        EntityCondition finished = EntityCondition.makeCondition(finExp);

        EntityCondition doneCond = EntityCondition.makeCondition(UtilMisc.toList(cancelled, finished), EntityOperator.OR);

        // always suspend the current transaction; use the one internally
        Transaction parent = null;
        try {
            if (TransactionUtil.getStatus() != TransactionUtil.STATUS_NO_TRANSACTION) {
                parent = TransactionUtil.suspend();
            }

            // lookup the jobs - looping 1000 at a time to avoid problems with cursors
            // also, using unique transaction to delete as many as possible even with errors
            boolean noMoreResults = false;
            boolean beganTx1 = false;
            while (!noMoreResults) {
                // current list of records
                List<GenericValue> curList = null;
                try {
                    // begin this transaction
                    beganTx1 = TransactionUtil.begin();

                    EntityListIterator foundJobs = null;
                    try {
                        foundJobs = EntityQuery.use(delegator)
                                               .select("jobId")
                                               .from("JobSandbox")
                                               .where(EntityCondition.makeCondition(UtilMisc.toList(doneCond, pool)))
                                               .cursorScrollInsensitive()
                                               .maxRows(1000)
                                               .queryIterator();
                        curList = foundJobs.getPartialList(1, 1000);
                    } finally {
                        if (foundJobs != null) {
                            foundJobs.close();
                        }
                    }
                } catch (GenericEntityException e) {
                    Debug.logError(e, "Cannot obtain job data from datasource", module);
                    try {
                        TransactionUtil.rollback(beganTx1, e.getMessage(), e);
                    } catch (GenericTransactionException e1) {
                        Debug.logWarning(e1, module);
                    }
                    return ServiceUtil.returnError(e.getMessage());
                } finally {
                    try {
                        TransactionUtil.commit(beganTx1);
                    } catch (GenericTransactionException e) {
                        Debug.logWarning(e, module);
                    }
                }

                // remove each from the list in its own transaction
                if (UtilValidate.isNotEmpty(curList)) {
                    for (GenericValue job: curList) {
                        String jobId = job.getString("jobId");
                        boolean beganTx2 = false;
                        try {
                            beganTx2 = TransactionUtil.begin();
                            job.remove();
                        } catch (GenericEntityException e) {
                            Debug.logInfo("Cannot remove job data for ID: " + jobId, module);
                            try {
                                TransactionUtil.rollback(beganTx2, e.getMessage(), e);
                            } catch (GenericTransactionException e1) {
                                Debug.logWarning(e1, module);
                            }
                        } finally {
                            try {
                                TransactionUtil.commit(beganTx2);
                            } catch (GenericTransactionException e) {
                                Debug.logWarning(e, module);
                            }
                        }
                    }
                } else {
                    noMoreResults = true;
                }
            }

            // Now JobSandbox data is cleaned up. Now process Runtime data and remove the whole data in single shot that is of no need.
            boolean beganTx3 = false;
            GenericValue runtimeData = null;
            EntityListIterator runTimeDataIt = null;
            List<GenericValue> runtimeDataToDelete = new LinkedList<GenericValue>();
            long jobsandBoxCount = 0;
            try {
                // begin this transaction
                beganTx3 = TransactionUtil.begin();

                runTimeDataIt = EntityQuery.use(delegator).select("runtimeDataId").from("RuntimeData").queryIterator();
                try {
                    while ((runtimeData = runTimeDataIt.next()) != null) {
                        EntityCondition whereCondition = EntityCondition.makeCondition(UtilMisc.toList(EntityCondition.makeCondition("runtimeDataId", EntityOperator.NOT_EQUAL, null),
                                EntityCondition.makeCondition("runtimeDataId", EntityOperator.EQUALS, runtimeData.getString("runtimeDataId"))), EntityOperator.AND);
                        jobsandBoxCount = EntityQuery.use(delegator).from("JobSandbox").where(whereCondition).queryCount();
                        if (BigDecimal.ZERO.compareTo(BigDecimal.valueOf(jobsandBoxCount)) == 0) {
                            runtimeDataToDelete.add(runtimeData);
                        }
                    }
                } finally {
                    runTimeDataIt.close();
                }
                // Now we are ready to delete runtimeData, we can safely delete complete list that we have recently fetched i.e runtimeDataToDelete.
                delegator.removeAll(runtimeDataToDelete);
            } catch (GenericEntityException e) {
                Debug.logError(e, "Cannot obtain runtime data from datasource", module);
                try {
                    TransactionUtil.rollback(beganTx3, e.getMessage(), e);
                } catch (GenericTransactionException e1) {
                    Debug.logWarning(e1, module);
                }
                return ServiceUtil.returnError(e.getMessage());
            } finally {
                try {
                    TransactionUtil.commit(beganTx3);
                } catch (GenericTransactionException e) {
                    Debug.logWarning(e, module);
                }
            }
        } catch (GenericTransactionException e) {
            Debug.logError(e, "Unable to suspend transaction; cannot purge jobs!", module);
            return ServiceUtil.returnError(e.getMessage());
        } finally {
            if (parent != null) {
                try {
                    TransactionUtil.resume(parent);
                } catch (GenericTransactionException e) {
                    Debug.logWarning(e, module);
                }
            }
        }

        return ServiceUtil.returnSuccess();
    }

    /**
     * Cancels a scheduled job by setting its cancelDateTime and marking it
     * SERVICE_CANCELLED.  Requires the SERVICE_INVOKE_ANY permission.
     */
    public static Map<String, Object> cancelJob(DispatchContext dctx, Map<String, ? extends Object> context) {
        Delegator delegator = dctx.getDelegator();
        Security security = dctx.getSecurity();
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        Locale locale = getLocale(context);

        if (!security.hasPermission("SERVICE_INVOKE_ANY", userLogin)) {
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.no_permission_to_run", locale) + ".";
            return ServiceUtil.returnError(errMsg);
        }

        String jobId = (String) context.get("jobId");
        Map<String, Object> fields = UtilMisc.<String, Object>toMap("jobId", jobId);

        GenericValue job = null;
        try {
            job = EntityQuery.use(delegator).from("JobSandbox").where("jobId", jobId).queryOne();
            if (job != null) {
                job.set("cancelDateTime", UtilDateTime.nowTimestamp());
                job.set("statusId", "SERVICE_CANCELLED");
                job.store();
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, module);
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.unable_to_cancel_job", locale) + " : " + fields;
            return ServiceUtil.returnError(errMsg);
        }

        // FIX: the lookup may legitimately return null for an unknown jobId; the
        // original code dereferenced job unconditionally and threw an NPE here
        if (job == null) {
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.unable_to_cancel_job", locale) + " : " + fields;
            return ServiceUtil.returnError(errMsg);
        }

        Timestamp cancelDate = job.getTimestamp("cancelDateTime");
        if (cancelDate != null) {
            Map<String, Object> result = ServiceUtil.returnSuccess();
            result.put("cancelDateTime", cancelDate);
            result.put("statusId", "SERVICE_PENDING"); // To more easily see current pending jobs and possibly cancel some others
            return result;
        } else {
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.unable_to_cancel_job", locale) + " : " + job;
            return ServiceUtil.returnError(errMsg);
        }
    }

    /**
     * Disables further retries of a scheduled job by setting its maxRetry to 0.
     * Requires the SERVICE_INVOKE_ANY permission.
     */
    public static Map<String, Object> cancelJobRetries(DispatchContext dctx, Map<String, ? extends Object> context) {
        Delegator delegator = dctx.getDelegator();
        Security security = dctx.getSecurity();
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        Locale locale = getLocale(context);
        if (!security.hasPermission("SERVICE_INVOKE_ANY", userLogin)) {
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.no_permission_to_run", locale) + ".";
            return ServiceUtil.returnError(errMsg);
        }

        String jobId = (String) context.get("jobId");
        Map<String, Object> fields = UtilMisc.<String, Object>toMap("jobId", jobId);

        GenericValue job = null;
        try {
            job = EntityQuery.use(delegator).from("JobSandbox").where("jobId", jobId).queryOne();
            if (job != null) {
                job.set("maxRetry", Long.valueOf(0));
                job.store();
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, module);
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.unable_to_cancel_job_retries", locale) + " : " + fields;
            return ServiceUtil.returnError(errMsg);
        }

        // FIX: the lookup may return null for an unknown jobId; the original code
        // dereferenced job unconditionally and threw an NPE here
        if (job == null) {
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.unable_to_cancel_job_retries", locale) + " : " + fields;
            return ServiceUtil.returnError(errMsg);
        }

        Timestamp cancelDate = job.getTimestamp("cancelDateTime");
        if (cancelDate != null) {
            return ServiceUtil.returnSuccess();
        } else {
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.unable_to_cancel_job_retries", locale) + " : " + job;
            return ServiceUtil.returnError(errMsg);
        }
    }

    /** Generic service condition: true only when now is within the optional fromDate/thruDate window. */
    public static Map<String, Object> genericDateCondition(DispatchContext dctx, Map<String, ? extends Object> context) {
        Timestamp fromDate = (Timestamp) context.get("fromDate");
        Timestamp thruDate = (Timestamp) context.get("thruDate");
        Timestamp now = UtilDateTime.nowTimestamp();
        boolean reply = true;

        if (fromDate != null && fromDate.after(now)) reply = false;
        if (thruDate != null && thruDate.before(now)) reply = false;

        Map<String, Object> result = ServiceUtil.returnSuccess();
        result.put("conditionReply", reply);
        return result;
    }

    /** Returns the userLogin from the context, or the runAsUser's UserLogin when one is specified and found. */
    public static GenericValue getUserLogin(DispatchContext dctx, Map<String, ? extends Object> context, String runAsUser) {
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        Delegator delegator = dctx.getDelegator();
        if (UtilValidate.isNotEmpty(runAsUser)) {
            try {
                GenericValue runAs = EntityQuery.use(delegator).from("UserLogin").where("userLoginId", runAsUser).cache().queryOne();
                if (runAs != null) {
                    userLogin = runAs;
                }
            } catch (GenericEntityException e) {
                Debug.logError(e, module);
            }
        }
        return userLogin;
    }

    // Returns the context locale, defaulting to the JVM default when absent.
    private static Locale getLocale(Map<String, ? extends Object> context) {
        Locale locale = (Locale) context.get("locale");
        if (locale == null) {
            locale = Locale.getDefault();
        }
        return locale;
    }

    /** Builds a context Map from alternating key/value varargs; every even-index argument must be a String key. */
    public static <T extends Object> Map<String, Object> makeContext(T... args) {
        if (args != null) {
            for (int i = 0; i < args.length; i += 2) {
                if (!(args[i] instanceof String)) throw new IllegalArgumentException("Arg(" + i + "), value(" + args[i] + ") is not a string.");
            }
        }
        return UtilGenerics.checkMap(UtilMisc.toMap(args));
    }

    /**
     * Resets a job back to SERVICE_PENDING, clearing its start/finish/cancel
     * timestamps and run instance.  Requires the SERVICE_INVOKE_ANY permission.
     */
    public static Map<String, Object> resetJob(DispatchContext dctx, Map<String, Object> context) {
        Delegator delegator = dctx.getDelegator();
        Security security = dctx.getSecurity();
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        Locale locale = getLocale(context);

        if (!security.hasPermission("SERVICE_INVOKE_ANY", userLogin)) {
            String errMsg = UtilProperties.getMessage(ServiceUtil.resource, "serviceUtil.no_permission_to_run", locale) + ".";
            return ServiceUtil.returnError(errMsg);
        }

        String jobId = (String) context.get("jobId");
        GenericValue job;
        try {
            // NOTE(review): this lookup uses .cache() and then mutates/stores the value;
            // mutating a cached GenericValue is suspect — confirm intent before changing
            job = EntityQuery.use(delegator).from("JobSandbox").where("jobId", jobId).cache().queryOne();
        } catch (GenericEntityException e) {
            Debug.logError(e, module);
            return ServiceUtil.returnError(e.getMessage());
        }

        // update the job
        if (job != null) {
            job.set("statusId", "SERVICE_PENDING");
            job.set("startDateTime", null);
            job.set("finishDateTime", null);
            job.set("cancelDateTime", null);
            job.set("runByInstanceId", null);

            // save the job
            try {
                job.store();
            } catch (GenericEntityException e) {
                Debug.logError(e, module);
                return ServiceUtil.returnError(e.getMessage());
            }
        }

        return ServiceUtil.returnSuccess();
    }

    /**
     * Checks all incoming service attributes and look for fields with the same
     * name in the incoming map and copy those onto the outgoing map. Also
     * includes a userLogin if service requires one.
     *
     * @param dispatcher
     * @param serviceName
     * @param fromMap
     * @param userLogin
     *            (optional) - will be added to the map if is required
     * @param timeZone
     * @param locale
     * @return filled Map or null on error
     * @throws GeneralServiceException
     */
    public static Map<String, Object> setServiceFields(LocalDispatcher dispatcher, String serviceName, Map<String, Object> fromMap, GenericValue userLogin,
            TimeZone timeZone, Locale locale) throws GeneralServiceException {
        Map<String, Object> outMap = new HashMap<String, Object>();

        ModelService modelService = null;
        try {
            modelService = dispatcher.getDispatchContext().getModelService(serviceName);
        } catch (GenericServiceException e) {
            String errMsg = "Could not get service definition for service name [" + serviceName + "]: ";
            Debug.logError(e, errMsg, module);
            throw new GeneralServiceException(e);
        }
        outMap.putAll(modelService.makeValid(fromMap, "IN", true, null, timeZone, locale));

        if (userLogin != null && modelService.auth) {
            outMap.put("userLogin", userLogin);
        }

        return outMap;
    }

    /** Returns the UI-label resource name used for service error messages. */
    public static String getResource() {
        return resource;
    }
}
google/java-photoslibrary
35,052
photoslibraryapi/src/main/java/com/google/photos/library/v1/proto/UpdateAlbumRequest.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/photos/library/v1/photos_library.proto package com.google.photos.library.v1.proto; /** * * * <pre> * Request to update an album in Google Photos. * </pre> * * Protobuf type {@code google.photos.library.v1.UpdateAlbumRequest} */ public final class UpdateAlbumRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.photos.library.v1.UpdateAlbumRequest) UpdateAlbumRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateAlbumRequest.newBuilder() to construct. private UpdateAlbumRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateAlbumRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateAlbumRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateAlbumRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateAlbumRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.photos.library.v1.proto.UpdateAlbumRequest.class, com.google.photos.library.v1.proto.UpdateAlbumRequest.Builder.class); } public static final int ALBUM_FIELD_NUMBER = 1; private com.google.photos.types.proto.Album album_; /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. 
* The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the album field is set. */ @java.lang.Override public boolean hasAlbum() { return album_ != null; } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The album. */ @java.lang.Override public com.google.photos.types.proto.Album getAlbum() { return album_ == null ? com.google.photos.types.proto.Album.getDefaultInstance() : album_; } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.photos.types.proto.AlbumOrBuilder getAlbumOrBuilder() { return album_ == null ? com.google.photos.types.proto.Album.getDefaultInstance() : album_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. 
*/ @java.lang.Override public boolean hasUpdateMask() { return updateMask_ != null; } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (album_ != null) { output.writeMessage(1, getAlbum()); } if (updateMask_ != null) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (album_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAlbum()); } if (updateMask_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.photos.library.v1.proto.UpdateAlbumRequest)) { return super.equals(obj); } com.google.photos.library.v1.proto.UpdateAlbumRequest other = (com.google.photos.library.v1.proto.UpdateAlbumRequest) obj; if (hasAlbum() != other.hasAlbum()) return false; if (hasAlbum()) { if (!getAlbum().equals(other.getAlbum())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAlbum()) { hash = (37 * hash) + ALBUM_FIELD_NUMBER; hash = (53 * hash) + getAlbum().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.photos.library.v1.proto.UpdateAlbumRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.photos.library.v1.proto.UpdateAlbumRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to update an album in Google Photos. * </pre> * * Protobuf type {@code google.photos.library.v1.UpdateAlbumRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.photos.library.v1.UpdateAlbumRequest) com.google.photos.library.v1.proto.UpdateAlbumRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateAlbumRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateAlbumRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.photos.library.v1.proto.UpdateAlbumRequest.class, com.google.photos.library.v1.proto.UpdateAlbumRequest.Builder.class); } // 
Construct using com.google.photos.library.v1.proto.UpdateAlbumRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; album_ = null; if (albumBuilder_ != null) { albumBuilder_.dispose(); albumBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateAlbumRequest_descriptor; } @java.lang.Override public com.google.photos.library.v1.proto.UpdateAlbumRequest getDefaultInstanceForType() { return com.google.photos.library.v1.proto.UpdateAlbumRequest.getDefaultInstance(); } @java.lang.Override public com.google.photos.library.v1.proto.UpdateAlbumRequest build() { com.google.photos.library.v1.proto.UpdateAlbumRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.photos.library.v1.proto.UpdateAlbumRequest buildPartial() { com.google.photos.library.v1.proto.UpdateAlbumRequest result = new com.google.photos.library.v1.proto.UpdateAlbumRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.photos.library.v1.proto.UpdateAlbumRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.album_ = albumBuilder_ == null ? album_ : albumBuilder_.build(); } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.photos.library.v1.proto.UpdateAlbumRequest) { return mergeFrom((com.google.photos.library.v1.proto.UpdateAlbumRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.photos.library.v1.proto.UpdateAlbumRequest other) { if (other == com.google.photos.library.v1.proto.UpdateAlbumRequest.getDefaultInstance()) return this; if (other.hasAlbum()) { mergeAlbum(other.getAlbum()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { 
int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getAlbumFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.photos.types.proto.Album album_; private com.google.protobuf.SingleFieldBuilderV3< com.google.photos.types.proto.Album, com.google.photos.types.proto.Album.Builder, com.google.photos.types.proto.AlbumOrBuilder> albumBuilder_; /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the album field is set. */ public boolean hasAlbum() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The album. */ public com.google.photos.types.proto.Album getAlbum() { if (albumBuilder_ == null) { return album_ == null ? 
com.google.photos.types.proto.Album.getDefaultInstance() : album_; } else { return albumBuilder_.getMessage(); } } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setAlbum(com.google.photos.types.proto.Album value) { if (albumBuilder_ == null) { if (value == null) { throw new NullPointerException(); } album_ = value; } else { albumBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setAlbum(com.google.photos.types.proto.Album.Builder builderForValue) { if (albumBuilder_ == null) { album_ = builderForValue.build(); } else { albumBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. 
* </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeAlbum(com.google.photos.types.proto.Album value) { if (albumBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && album_ != null && album_ != com.google.photos.types.proto.Album.getDefaultInstance()) { getAlbumBuilder().mergeFrom(value); } else { album_ = value; } } else { albumBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearAlbum() { bitField0_ = (bitField0_ & ~0x00000001); album_ = null; if (albumBuilder_ != null) { albumBuilder_.dispose(); albumBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.photos.types.proto.Album.Builder getAlbumBuilder() { bitField0_ |= 0x00000001; onChanged(); return getAlbumFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. 
* </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.photos.types.proto.AlbumOrBuilder getAlbumOrBuilder() { if (albumBuilder_ != null) { return albumBuilder_.getMessageOrBuilder(); } else { return album_ == null ? com.google.photos.types.proto.Album.getDefaultInstance() : album_; } } /** * * * <pre> * Required. The [Album][google.photos.types.Album] to update. * The album’s `id` field is used to identify the album to be updated. * The album’s `title` field is used to set the new album title. * The album’s `cover_photo_media_item_id` field is used to set the new album * cover photo. * </pre> * * <code>.google.photos.types.Album album = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.photos.types.proto.Album, com.google.photos.types.proto.Album.Builder, com.google.photos.types.proto.AlbumOrBuilder> getAlbumFieldBuilder() { if (albumBuilder_ == null) { albumBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.photos.types.proto.Album, com.google.photos.types.proto.Album.Builder, com.google.photos.types.proto.AlbumOrBuilder>( getAlbum(), getParentForChildren(), isClean()); album_ = null; } return albumBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Indicate what fields in the provided album to update. 
* The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Indicate what fields in the provided album to update. * The only valid values are `title` and `cover_photo_media_item_id`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.photos.library.v1.UpdateAlbumRequest) } // @@protoc_insertion_point(class_scope:google.photos.library.v1.UpdateAlbumRequest) private static final com.google.photos.library.v1.proto.UpdateAlbumRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.photos.library.v1.proto.UpdateAlbumRequest(); } public static com.google.photos.library.v1.proto.UpdateAlbumRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateAlbumRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateAlbumRequest>() { @java.lang.Override public UpdateAlbumRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateAlbumRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateAlbumRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.photos.library.v1.proto.UpdateAlbumRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,182
java-dataproc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationAction.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/dataproc/v1/clusters.proto
// Protobuf Java Version: 3.25.8
//
// NOTE(review): this is protoc-generated code. Do not hand-edit logic; any
// change must be made in clusters.proto and regenerated. Comments below only
// summarize the generated members for readers.
package com.google.cloud.dataproc.v1;

/**
 * Specifies an executable to run on a fully configured node and a
 * timeout period for executable completion.
 *
 * <p>Protobuf type {@code google.cloud.dataproc.v1.NodeInitializationAction}.
 * Immutable; build instances via {@link #newBuilder()}.
 */
public final class NodeInitializationAction extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.NodeInitializationAction)
    NodeInitializationActionOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use NodeInitializationAction.newBuilder() to construct.
  private NodeInitializationAction(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private NodeInitializationAction() {
    executableFile_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new NodeInitializationAction();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.ClustersProto
        .internal_static_google_cloud_dataproc_v1_NodeInitializationAction_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataproc.v1.ClustersProto
        .internal_static_google_cloud_dataproc_v1_NodeInitializationAction_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.NodeInitializationAction.class,
            com.google.cloud.dataproc.v1.NodeInitializationAction.Builder.class);
  }

  // Presence bits for optional message fields: bit 0 tracks execution_timeout.
  private int bitField0_;

  public static final int EXECUTABLE_FILE_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below (standard generated-string representation).
  @SuppressWarnings("serial")
  private volatile java.lang.Object executableFile_ = "";

  /**
   * Required. Cloud Storage URI of executable file.
   *
   * <p>{@code string executable_file = 1 [(.google.api.field_behavior) = REQUIRED];}
   *
   * @return The executableFile.
   */
  @java.lang.Override
  public java.lang.String getExecutableFile() {
    java.lang.Object ref = executableFile_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later reads skip the UTF-8 decode.
      executableFile_ = s;
      return s;
    }
  }

  /**
   * Required. Cloud Storage URI of executable file.
   *
   * <p>{@code string executable_file = 1 [(.google.api.field_behavior) = REQUIRED];}
   *
   * @return The bytes for executableFile.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getExecutableFileBytes() {
    java.lang.Object ref = executableFile_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString (mirror of getExecutableFile's caching).
      executableFile_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int EXECUTION_TIMEOUT_FIELD_NUMBER = 2;
  private com.google.protobuf.Duration executionTimeout_;

  /**
   * Optional. Amount of time executable has to complete. Default is
   * 10 minutes (see JSON representation of
   * <a href="https://developers.google.com/protocol-buffers/docs/proto3#json">Duration</a>).
   * Cluster creation fails with an explanatory error message (the name of the
   * executable that caused the error and the exceeded timeout period) if the
   * executable is not completed at end of the timeout period.
   *
   * <p>{@code .google.protobuf.Duration execution_timeout = 2 [(.google.api.field_behavior) = OPTIONAL];}
   *
   * @return Whether the executionTimeout field is set.
   */
  @java.lang.Override
  public boolean hasExecutionTimeout() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * Optional. Amount of time executable has to complete (see
   * {@link #hasExecutionTimeout()} for presence).
   *
   * @return The executionTimeout; the default Duration instance if unset.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getExecutionTimeout() {
    return executionTimeout_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : executionTimeout_;
  }

  /** Read-only view of execution_timeout; never returns {@code null}. */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getExecutionTimeoutOrBuilder() {
    return executionTimeout_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : executionTimeout_;
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    // No proto2 required fields, so this always memoizes "initialized".
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 default-valued fields (empty string) are skipped on the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(executableFile_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, executableFile_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getExecutionTimeout());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(executableFile_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, executableFile_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExecutionTimeout());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.NodeInitializationAction)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.NodeInitializationAction other =
        (com.google.cloud.dataproc.v1.NodeInitializationAction) obj;

    if (!getExecutableFile().equals(other.getExecutableFile())) return false;
    if (hasExecutionTimeout() != other.hasExecutionTimeout()) return false;
    if (hasExecutionTimeout()) {
      if (!getExecutionTimeout().equals(other.getExecutionTimeout())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + EXECUTABLE_FILE_FIELD_NUMBER;
    hash = (53 * hash) + getExecutableFile().hashCode();
    if (hasExecutionTimeout()) {
      hash = (37 * hash) + EXECUTION_TIMEOUT_FIELD_NUMBER;
      hash = (53 * hash) + getExecutionTimeout().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points; all delegate to PARSER.
  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dataproc.v1.NodeInitializationAction prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Specifies an executable to run on a fully configured node and a
   * timeout period for executable completion.
   *
   * <p>Builder for {@code google.cloud.dataproc.v1.NodeInitializationAction}.
   * Not thread-safe, as with all generated builders.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.NodeInitializationAction)
      com.google.cloud.dataproc.v1.NodeInitializationActionOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataproc.v1.ClustersProto
          .internal_static_google_cloud_dataproc_v1_NodeInitializationAction_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataproc.v1.ClustersProto
          .internal_static_google_cloud_dataproc_v1_NodeInitializationAction_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataproc.v1.NodeInitializationAction.class,
              com.google.cloud.dataproc.v1.NodeInitializationAction.Builder.class);
    }

    // Construct using com.google.cloud.dataproc.v1.NodeInitializationAction.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested builders only when event callbacks are in use.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getExecutionTimeoutFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      executableFile_ = "";
      executionTimeout_ = null;
      if (executionTimeoutBuilder_ != null) {
        executionTimeoutBuilder_.dispose();
        executionTimeoutBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataproc.v1.ClustersProto
          .internal_static_google_cloud_dataproc_v1_NodeInitializationAction_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.NodeInitializationAction getDefaultInstanceForType() {
      return com.google.cloud.dataproc.v1.NodeInitializationAction.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.NodeInitializationAction build() {
      com.google.cloud.dataproc.v1.NodeInitializationAction result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.NodeInitializationAction buildPartial() {
      com.google.cloud.dataproc.v1.NodeInitializationAction result =
          new com.google.cloud.dataproc.v1.NodeInitializationAction(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the message. Note the bit remapping: builder bit 1
    // (execution_timeout) becomes message bit 0, since the message only tracks
    // presence for the message-typed field.
    private void buildPartial0(com.google.cloud.dataproc.v1.NodeInitializationAction result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.executableFile_ = executableFile_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.executionTimeout_ =
            executionTimeoutBuilder_ == null ? executionTimeout_ : executionTimeoutBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataproc.v1.NodeInitializationAction) {
        return mergeFrom((com.google.cloud.dataproc.v1.NodeInitializationAction) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dataproc.v1.NodeInitializationAction other) {
      if (other == com.google.cloud.dataproc.v1.NodeInitializationAction.getDefaultInstance())
        return this;
      if (!other.getExecutableFile().isEmpty()) {
        executableFile_ = other.executableFile_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasExecutionTimeout()) {
        mergeExecutionTimeout(other.getExecutionTimeout());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse: reads tag/value pairs until end of input. Tag 10 is
    // field 1 (string), tag 18 is field 2 (length-delimited message).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                executableFile_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getExecutionTimeoutFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder presence bits: bit 0 = executable_file set, bit 1 = execution_timeout set.
    private int bitField0_;

    private java.lang.Object executableFile_ = "";

    /**
     * Required. Cloud Storage URI of executable file.
     *
     * @return The executableFile.
     */
    public java.lang.String getExecutableFile() {
      java.lang.Object ref = executableFile_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        executableFile_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Required. Cloud Storage URI of executable file.
     *
     * @return The bytes for executableFile.
     */
    public com.google.protobuf.ByteString getExecutableFileBytes() {
      java.lang.Object ref = executableFile_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        executableFile_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Required. Cloud Storage URI of executable file.
     *
     * @param value The executableFile to set.
     * @return This builder for chaining.
     */
    public Builder setExecutableFile(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      executableFile_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Required. Cloud Storage URI of executable file.
     *
     * @return This builder for chaining.
     */
    public Builder clearExecutableFile() {
      executableFile_ = getDefaultInstance().getExecutableFile();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Required. Cloud Storage URI of executable file.
     *
     * @param value The bytes for executableFile to set; must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setExecutableFileBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      executableFile_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.protobuf.Duration executionTimeout_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        executionTimeoutBuilder_;

    /**
     * Optional. Amount of time executable has to complete. Default is 10 minutes.
     *
     * @return Whether the executionTimeout field is set.
     */
    public boolean hasExecutionTimeout() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     * Optional. Amount of time executable has to complete.
     *
     * @return The executionTimeout; the default Duration instance if unset.
     */
    public com.google.protobuf.Duration getExecutionTimeout() {
      if (executionTimeoutBuilder_ == null) {
        return executionTimeout_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : executionTimeout_;
      } else {
        return executionTimeoutBuilder_.getMessage();
      }
    }

    /** Sets execution_timeout to {@code value} and marks it present. */
    public Builder setExecutionTimeout(com.google.protobuf.Duration value) {
      if (executionTimeoutBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        executionTimeout_ = value;
      } else {
        executionTimeoutBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /** Sets execution_timeout from a Duration builder and marks it present. */
    public Builder setExecutionTimeout(com.google.protobuf.Duration.Builder builderForValue) {
      if (executionTimeoutBuilder_ == null) {
        executionTimeout_ = builderForValue.build();
      } else {
        executionTimeoutBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Field-merges {@code value} into execution_timeout: if a non-default value
     * is already present, merges field-by-field; otherwise replaces it.
     */
    public Builder mergeExecutionTimeout(com.google.protobuf.Duration value) {
      if (executionTimeoutBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && executionTimeout_ != null
            && executionTimeout_ != com.google.protobuf.Duration.getDefaultInstance()) {
          getExecutionTimeoutBuilder().mergeFrom(value);
        } else {
          executionTimeout_ = value;
        }
      } else {
        executionTimeoutBuilder_.mergeFrom(value);
      }
      if (executionTimeout_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /** Clears execution_timeout and its presence bit. */
    public Builder clearExecutionTimeout() {
      bitField0_ = (bitField0_ & ~0x00000002);
      executionTimeout_ = null;
      if (executionTimeoutBuilder_ != null) {
        executionTimeoutBuilder_.dispose();
        executionTimeoutBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /** Returns a mutable Duration builder for execution_timeout, marking it present. */
    public com.google.protobuf.Duration.Builder getExecutionTimeoutBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getExecutionTimeoutFieldBuilder().getBuilder();
    }

    /** Read-only view of execution_timeout; never returns {@code null}. */
    public com.google.protobuf.DurationOrBuilder getExecutionTimeoutOrBuilder() {
      if (executionTimeoutBuilder_ != null) {
        return executionTimeoutBuilder_.getMessageOrBuilder();
      } else {
        return executionTimeout_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : executionTimeout_;
      }
    }

    // Lazily creates the nested field builder; once created, the plain field is
    // nulled out and the builder becomes the single source of truth.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getExecutionTimeoutFieldBuilder() {
      if (executionTimeoutBuilder_ == null) {
        executionTimeoutBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getExecutionTimeout(), getParentForChildren(), isClean());
        executionTimeout_ = null;
      }
      return executionTimeoutBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.NodeInitializationAction)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.NodeInitializationAction)
  private static final com.google.cloud.dataproc.v1.NodeInitializationAction DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.NodeInitializationAction();
  }

  public static com.google.cloud.dataproc.v1.NodeInitializationAction getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<NodeInitializationAction> PARSER =
      new com.google.protobuf.AbstractParser<NodeInitializationAction>() {
        @java.lang.Override
        public NodeInitializationAction parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<NodeInitializationAction> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<NodeInitializationAction> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataproc.v1.NodeInitializationAction getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/tinkerpop
35,622
spark-gremlin/src/main/java/org/apache/tinkerpop/gremlin/spark/process/computer/SparkGraphComputer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.spark.process.computer; import org.apache.commons.configuration2.ConfigurationUtils; import org.apache.commons.configuration2.builder.fluent.Configurations; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.spark.HashPartitioner; import org.apache.spark.Partitioner; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.launcher.SparkLauncher; import org.apache.spark.serializer.KryoRegistrator; import org.apache.spark.serializer.KryoSerializer; import org.apache.spark.serializer.Serializer; import org.apache.spark.storage.StorageLevel; import org.apache.tinkerpop.gremlin.hadoop.Constants; import org.apache.tinkerpop.gremlin.hadoop.process.computer.AbstractHadoopGraphComputer; import org.apache.tinkerpop.gremlin.hadoop.process.computer.util.ComputerSubmissionHelper; import 
org.apache.tinkerpop.gremlin.hadoop.structure.HadoopConfiguration; import org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph; import org.apache.tinkerpop.gremlin.hadoop.structure.io.FileSystemStorage; import org.apache.tinkerpop.gremlin.hadoop.structure.io.GraphFilterAware; import org.apache.tinkerpop.gremlin.hadoop.structure.io.HadoopPoolShimService; import org.apache.tinkerpop.gremlin.hadoop.structure.io.VertexWritable; import org.apache.tinkerpop.gremlin.hadoop.structure.util.ConfUtil; import org.apache.tinkerpop.gremlin.process.computer.ComputerResult; import org.apache.tinkerpop.gremlin.process.computer.GraphComputer; import org.apache.tinkerpop.gremlin.process.computer.MapReduce; import org.apache.tinkerpop.gremlin.process.computer.Memory; import org.apache.tinkerpop.gremlin.process.computer.VertexProgram; import org.apache.tinkerpop.gremlin.process.computer.clone.CloneVertexProgram; import org.apache.tinkerpop.gremlin.process.computer.util.DefaultComputerResult; import org.apache.tinkerpop.gremlin.process.computer.util.MapMemory; import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies; import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalInterruptedException; import org.apache.tinkerpop.gremlin.spark.process.computer.payload.ViewIncomingPayload; import org.apache.tinkerpop.gremlin.spark.process.computer.traversal.strategy.SparkVertexProgramInterceptor; import org.apache.tinkerpop.gremlin.spark.process.computer.traversal.strategy.optimization.SparkInterceptorStrategy; import org.apache.tinkerpop.gremlin.spark.process.computer.traversal.strategy.optimization.SparkSingleIterationStrategy; import org.apache.tinkerpop.gremlin.spark.process.computer.traversal.strategy.optimization.interceptor.SparkCloneVertexProgramInterceptor; import org.apache.tinkerpop.gremlin.spark.structure.Spark; import org.apache.tinkerpop.gremlin.spark.structure.io.InputFormatRDD; import 
org.apache.tinkerpop.gremlin.spark.structure.io.InputOutputHelper; import org.apache.tinkerpop.gremlin.spark.structure.io.InputRDD; import org.apache.tinkerpop.gremlin.spark.structure.io.OutputFormatRDD; import org.apache.tinkerpop.gremlin.spark.structure.io.OutputRDD; import org.apache.tinkerpop.gremlin.spark.structure.io.PersistedInputRDD; import org.apache.tinkerpop.gremlin.spark.structure.io.PersistedOutputRDD; import org.apache.tinkerpop.gremlin.spark.structure.io.SparkContextStorage; import org.apache.tinkerpop.gremlin.spark.structure.io.SparkIOUtil; import org.apache.tinkerpop.gremlin.spark.structure.io.gryo.GryoRegistrator; import org.apache.tinkerpop.gremlin.spark.structure.io.gryo.kryoshim.unshaded.UnshadedKryoShimService; import org.apache.tinkerpop.gremlin.structure.Direction; import org.apache.tinkerpop.gremlin.structure.io.IoRegistry; import org.apache.tinkerpop.gremlin.structure.io.Storage; import org.apache.tinkerpop.gremlin.structure.io.gryo.kryoshim.KryoShimServiceLoader; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ThreadFactory; import static org.apache.tinkerpop.gremlin.hadoop.Constants.GREMLIN_SPARK_GRAPH_STORAGE_LEVEL; import static org.apache.tinkerpop.gremlin.hadoop.Constants.GREMLIN_SPARK_PERSIST_CONTEXT; import static org.apache.tinkerpop.gremlin.hadoop.Constants.GREMLIN_SPARK_PERSIST_STORAGE_LEVEL; import static org.apache.tinkerpop.gremlin.hadoop.Constants.GREMLIN_SPARK_SKIP_GRAPH_CACHE; import static org.apache.tinkerpop.gremlin.hadoop.Constants.GREMLIN_SPARK_SKIP_PARTITIONER; import static org.apache.tinkerpop.gremlin.hadoop.Constants.SPARK_KRYO_REGISTRATION_REQUIRED; import static org.apache.tinkerpop.gremlin.hadoop.Constants.SPARK_SERIALIZER; /** * {@link GraphComputer} 
implementation for Apache Spark. * * @author Marko A. Rodriguez (http://markorodriguez.com) */ public final class SparkGraphComputer extends AbstractHadoopGraphComputer { private final org.apache.commons.configuration2.Configuration sparkConfiguration; private boolean workersSet = false; private final ThreadFactory threadFactoryBoss = new BasicThreadFactory.Builder().namingPattern(SparkGraphComputer.class.getSimpleName() + "-boss").build(); private static final Set<String> KEYS_PASSED_IN_JVM_SYSTEM_PROPERTIES = new HashSet<>(Arrays.asList( KryoShimServiceLoader.KRYO_SHIM_SERVICE, IoRegistry.IO_REGISTRY)); /** * An {@code ExecutorService} that schedules up background work. Since a {@link GraphComputer} is only used once * for a {@link VertexProgram} a single threaded executor is sufficient. */ private final ExecutorService computerService = Executors.newSingleThreadExecutor(threadFactoryBoss); static { TraversalStrategies.GlobalCache.registerStrategies(SparkGraphComputer.class, TraversalStrategies.GlobalCache.getStrategies(GraphComputer.class).clone().addStrategies( SparkSingleIterationStrategy.instance(), SparkInterceptorStrategy.instance())); } public SparkGraphComputer(final HadoopGraph hadoopGraph) { super(hadoopGraph); this.sparkConfiguration = new HadoopConfiguration(); } /** * Sets the number of workers. If the {@code spark.master} configuration is configured with "local" then it will * change that configuration to use the specified number of worker threads. 
*/ @Override public SparkGraphComputer workers(final int workers) { super.workers(workers); if (this.sparkConfiguration.containsKey(SparkLauncher.SPARK_MASTER) && this.sparkConfiguration.getString(SparkLauncher.SPARK_MASTER).startsWith("local")) { this.sparkConfiguration.setProperty(SparkLauncher.SPARK_MASTER, "local[" + this.workers + "]"); } this.workersSet = true; return this; } @Override public SparkGraphComputer configure(final String key, final Object value) { this.sparkConfiguration.setProperty(key, value); return this; } /** * Sets the configuration option for {@code spark.master} which is the cluster manager to connect to which may be * one of the <a href="https://spark.apache.org/docs/latest/submitting-applications.html#master-urls">allowed master URLs</a>. */ public SparkGraphComputer master(final String clusterManager) { return configure(SparkLauncher.SPARK_MASTER, clusterManager); } /** * Determines if the Spark context should be left open preventing Spark from garbage collecting unreferenced RDDs. */ public SparkGraphComputer persistContext(final boolean persist) { return configure(GREMLIN_SPARK_PERSIST_CONTEXT, persist); } /** * Specifies the method by which the {@link VertexProgram} created graph is persisted. By default, it is configured * to use {@code StorageLevel#MEMORY_ONLY()} */ public SparkGraphComputer graphStorageLevel(final StorageLevel storageLevel) { return configure(GREMLIN_SPARK_GRAPH_STORAGE_LEVEL, storageLevel.description()); } public SparkGraphComputer persistStorageLevel(final StorageLevel storageLevel) { return configure(GREMLIN_SPARK_PERSIST_STORAGE_LEVEL, storageLevel.description()); } /** * Determines if the graph RDD should be partitioned or not. By default, this value is {@code false}. */ public SparkGraphComputer skipPartitioner(final boolean skip) { return configure(GREMLIN_SPARK_SKIP_PARTITIONER, skip); } /** * Determines if the graph RDD should be cached or not. 
If {@code true} then * {@link #graphStorageLevel(StorageLevel)} is ignored. By default, this value is {@code false}. */ public SparkGraphComputer skipGraphCache(final boolean skip) { return configure(GREMLIN_SPARK_SKIP_GRAPH_CACHE, skip); } /** * Specifies the {@code org.apache.spark.serializer.Serializer} implementation to use. By default, this value is * set to {@code org.apache.spark.serializer.KryoSerializer}. */ public SparkGraphComputer serializer(final Class<? extends Serializer> serializer) { return configure(SPARK_SERIALIZER, serializer.getCanonicalName()); } /** * Specifies the {@code org.apache.spark.serializer.KryoRegistrator} to use to install additional types. By * default this value is set to TinkerPop's {@link GryoRegistrator}. */ public SparkGraphComputer sparkKryoRegistrator(final Class<? extends KryoRegistrator> registrator) { return configure(Constants.SPARK_KRYO_REGISTRATOR, registrator.getCanonicalName()); } /** * Determines if kryo registration is required such that attempts to serialize classes that are not registered * will result in an error. By default this value is {@code false}. 
*/ public SparkGraphComputer kryoRegistrationRequired(final boolean required) { return configure(SPARK_KRYO_REGISTRATION_REQUIRED, required); } @Override public Future<ComputerResult> submit() { this.validateStatePriorToExecution(); return ComputerSubmissionHelper.runWithBackgroundThread(this::submitWithExecutor, "SparkSubmitter"); } private Future<ComputerResult> submitWithExecutor(Executor exec) { // create the completable future final Future<ComputerResult> result = computerService.submit(() -> { final long startTime = System.currentTimeMillis(); ////////////////////////////////////////////////// /////// PROCESS SHIM AND SYSTEM PROPERTIES /////// ////////////////////////////////////////////////// ConfigurationUtils.copy(this.hadoopGraph.configuration(), this.sparkConfiguration); final String shimService = KryoSerializer.class.getCanonicalName().equals(this.sparkConfiguration.getString(Constants.SPARK_SERIALIZER, null)) ? UnshadedKryoShimService.class.getCanonicalName() : HadoopPoolShimService.class.getCanonicalName(); this.sparkConfiguration.setProperty(KryoShimServiceLoader.KRYO_SHIM_SERVICE, shimService); /////////// final StringBuilder params = new StringBuilder(); this.sparkConfiguration.getKeys().forEachRemaining(key -> { if (KEYS_PASSED_IN_JVM_SYSTEM_PROPERTIES.contains(key)) { params.append(" -D").append("tinkerpop.").append(key).append("=").append(this.sparkConfiguration.getProperty(key)); System.setProperty("tinkerpop." 
+ key, this.sparkConfiguration.getProperty(key).toString()); } }); if (params.length() > 0) { this.sparkConfiguration.setProperty(SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS, (this.sparkConfiguration.getString(SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS, "") + params.toString()).trim()); this.sparkConfiguration.setProperty(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, (this.sparkConfiguration.getString(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "") + params.toString()).trim()); } KryoShimServiceLoader.applyConfiguration(this.sparkConfiguration); ////////////////////////////////////////////////// ////////////////////////////////////////////////// ////////////////////////////////////////////////// // apache and hadoop configurations that are used throughout the graph computer computation final org.apache.commons.configuration2.Configuration graphComputerConfiguration = new HadoopConfiguration(this.sparkConfiguration); if (!graphComputerConfiguration.containsKey(Constants.SPARK_SERIALIZER)) { graphComputerConfiguration.setProperty(Constants.SPARK_SERIALIZER, KryoSerializer.class.getCanonicalName()); if (!graphComputerConfiguration.containsKey(Constants.SPARK_KRYO_REGISTRATOR)) graphComputerConfiguration.setProperty(Constants.SPARK_KRYO_REGISTRATOR, GryoRegistrator.class.getCanonicalName()); } graphComputerConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER_HAS_EDGES, this.persist.equals(GraphComputer.Persist.EDGES)); final Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(graphComputerConfiguration); final Storage fileSystemStorage = FileSystemStorage.open(hadoopConfiguration); final boolean inputFromHDFS = FileInputFormat.class.isAssignableFrom(hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_READER, Object.class)); final boolean inputFromSpark = PersistedInputRDD.class.isAssignableFrom(hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_READER, Object.class)); final boolean outputToHDFS = 
FileOutputFormat.class.isAssignableFrom(hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_WRITER, Object.class)); final boolean outputToSpark = PersistedOutputRDD.class.isAssignableFrom(hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_WRITER, Object.class)); final boolean skipPartitioner = graphComputerConfiguration.getBoolean(GREMLIN_SPARK_SKIP_PARTITIONER, false); final boolean skipPersist = graphComputerConfiguration.getBoolean(GREMLIN_SPARK_SKIP_GRAPH_CACHE, false); if (inputFromHDFS) { String inputLocation = Constants .getSearchGraphLocation(hadoopConfiguration.get(Constants.GREMLIN_HADOOP_INPUT_LOCATION), fileSystemStorage).orElse(null); if (null != inputLocation) { try { graphComputerConfiguration.setProperty(Constants.MAPREDUCE_INPUT_FILEINPUTFORMAT_INPUTDIR, FileSystem.get(hadoopConfiguration).getFileStatus(new Path(inputLocation)).getPath() .toString()); hadoopConfiguration.set(Constants.MAPREDUCE_INPUT_FILEINPUTFORMAT_INPUTDIR, FileSystem.get(hadoopConfiguration).getFileStatus(new Path(inputLocation)).getPath() .toString()); } catch (final IOException e) { throw new IllegalStateException(e.getMessage(), e); } } } final InputRDD inputRDD = SparkIOUtil.createInputRDD(hadoopConfiguration); final boolean filtered; // if the input class can filter on load, then set the filters if (inputRDD instanceof InputFormatRDD && GraphFilterAware.class.isAssignableFrom(hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_READER, InputFormat.class, InputFormat.class))) { GraphFilterAware.storeGraphFilter(graphComputerConfiguration, hadoopConfiguration, this.graphFilter); filtered = false; } else if (inputRDD instanceof GraphFilterAware) { ((GraphFilterAware) inputRDD).setGraphFilter(this.graphFilter); filtered = false; } else if (this.graphFilter.hasFilter()) { filtered = true; } else { filtered = false; } final OutputRDD outputRDD; try { outputRDD = 
OutputRDD.class.isAssignableFrom(hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_WRITER, Object.class)) ? hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_WRITER, OutputRDD.class, OutputRDD.class).newInstance() : OutputFormatRDD.class.newInstance(); } catch (final InstantiationException | IllegalAccessException e) { throw new IllegalStateException(e.getMessage(), e); } // create the spark context from the graph computer configuration final JavaSparkContext sparkContext = new JavaSparkContext(Spark.create(hadoopConfiguration)); final Storage sparkContextStorage = SparkContextStorage.open(); SparkMemory memory = null; // delete output location final boolean dontDeleteNonEmptyOutput = graphComputerConfiguration.getBoolean(Constants.GREMLIN_SPARK_DONT_DELETE_NON_EMPTY_OUTPUT, false); final String outputLocation = hadoopConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, null); if (null != outputLocation) { if (outputToHDFS && fileSystemStorage.exists(outputLocation)) { if (dontDeleteNonEmptyOutput) { // DON'T delete the content if the folder is not empty if (fileSystemStorage.ls(outputLocation).size() == 0) { fileSystemStorage.rm(outputLocation); } else { throw new IllegalStateException("The output location '" + outputLocation + "' is not empty"); } } else { fileSystemStorage.rm(outputLocation); } } if (outputToSpark && sparkContextStorage.exists(outputLocation)) sparkContextStorage.rm(outputLocation); } // the Spark application name will always be set by SparkContextStorage, thus, INFO the name to make it easier to debug logger.debug(Constants.GREMLIN_HADOOP_SPARK_JOB_PREFIX + (null == this.vertexProgram ? 
"No VertexProgram" : this.vertexProgram) + "[" + this.mapReducers + "]"); // execute the vertex program and map reducers and if there is a failure, auto-close the spark context try { this.loadJars(hadoopConfiguration, sparkContext); // add the project jars to the cluster updateLocalConfiguration(sparkContext, hadoopConfiguration); // create a message-passing friendly rdd from the input rdd boolean partitioned = false; JavaPairRDD<Object, VertexWritable> loadedGraphRDD = SparkIOUtil.loadVertices(inputRDD, graphComputerConfiguration, sparkContext); // if there are vertex or edge filters, filter the loaded graph rdd prior to partitioning and persisting if (filtered) { this.logger.debug("Filtering the loaded graphRDD: " + this.graphFilter); loadedGraphRDD = SparkExecutor.applyGraphFilter(loadedGraphRDD, this.graphFilter); } // if the loaded graph RDD is already partitioned use that partitioner, else partition it with HashPartitioner if (loadedGraphRDD.partitioner().isPresent()) this.logger.debug("Using the existing partitioner associated with the loaded graphRDD: " + loadedGraphRDD.partitioner().get()); else { if (!skipPartitioner) { final Partitioner partitioner = new HashPartitioner(this.workersSet ? 
this.workers : loadedGraphRDD.partitions().size()); this.logger.debug("Partitioning the loaded graphRDD: " + partitioner); loadedGraphRDD = loadedGraphRDD.partitionBy(partitioner); partitioned = true; assert loadedGraphRDD.partitioner().isPresent(); } else { assert skipPartitioner == !loadedGraphRDD.partitioner().isPresent(); // no easy way to test this with a test case this.logger.debug("Partitioning has been skipped for the loaded graphRDD via " + GREMLIN_SPARK_SKIP_PARTITIONER); } } // if the loaded graphRDD was already partitioned previous, then this coalesce/repartition will not take place if (this.workersSet) { if (loadedGraphRDD.partitions().size() > this.workers) // ensures that the loaded graphRDD does not have more partitions than workers loadedGraphRDD = loadedGraphRDD.coalesce(this.workers); else if (loadedGraphRDD.partitions().size() < this.workers) // ensures that the loaded graphRDD does not have less partitions than workers loadedGraphRDD = loadedGraphRDD.repartition(this.workers); } // persist the vertex program loaded graph as specified by configuration or else use default cache() which is MEMORY_ONLY if (!skipPersist && (!inputFromSpark || partitioned || filtered)) loadedGraphRDD = loadedGraphRDD.persist(StorageLevel.fromString(hadoopConfiguration.get(GREMLIN_SPARK_GRAPH_STORAGE_LEVEL, "MEMORY_ONLY"))); // final graph with view (for persisting and/or mapReducing -- may be null and thus, possible to save space/time) JavaPairRDD<Object, VertexWritable> computedGraphRDD = null; //////////////////////////////// // process the vertex program // //////////////////////////////// if (null != this.vertexProgram) { memory = new SparkMemory(this.vertexProgram, this.mapReducers, sparkContext); // build a shortcut (which reduces the total Spark stages from 3 to 2) for CloneVertexProgram since it does nothing // and this improves the overall performance a lot if (this.vertexProgram.getClass().equals(CloneVertexProgram.class) && 
!graphComputerConfiguration.containsKey(Constants.GREMLIN_HADOOP_VERTEX_PROGRAM_INTERCEPTOR)) { graphComputerConfiguration.setProperty(Constants.GREMLIN_HADOOP_VERTEX_PROGRAM_INTERCEPTOR, SparkCloneVertexProgramInterceptor.class.getName()); } ///////////////// // if there is a registered VertexProgramInterceptor, use it to bypass the GraphComputer semantics if (graphComputerConfiguration.containsKey(Constants.GREMLIN_HADOOP_VERTEX_PROGRAM_INTERCEPTOR)) { try { final SparkVertexProgramInterceptor<VertexProgram> interceptor = (SparkVertexProgramInterceptor) Class.forName(graphComputerConfiguration.getString(Constants.GREMLIN_HADOOP_VERTEX_PROGRAM_INTERCEPTOR)).newInstance(); computedGraphRDD = interceptor.apply(this.vertexProgram, loadedGraphRDD, memory); } catch (final ClassNotFoundException | IllegalAccessException | InstantiationException e) { throw new IllegalStateException(e.getMessage()); } } else { // standard GraphComputer semantics // get a configuration that will be propagated to all workers final HadoopConfiguration vertexProgramConfiguration = new HadoopConfiguration(); this.vertexProgram.storeState(vertexProgramConfiguration); // set up the vertex program and wire up configurations this.vertexProgram.setup(memory); JavaPairRDD<Object, ViewIncomingPayload<Object>> viewIncomingRDD = null; memory.broadcastMemory(sparkContext); // execute the vertex program while (true) { if (Thread.interrupted()) { sparkContext.cancelAllJobs(); throw new TraversalInterruptedException(); } memory.setInExecute(true); viewIncomingRDD = SparkExecutor.executeVertexProgramIteration(loadedGraphRDD, viewIncomingRDD, memory, graphComputerConfiguration, vertexProgramConfiguration); memory.setInExecute(false); if (this.vertexProgram.terminate(memory)) break; else { memory.incrIteration(); memory.broadcastMemory(sparkContext); } } // if the graph will be continued to be used (persisted or mapreduced), then generate a view+graph if ((null != outputRDD && 
!this.persist.equals(Persist.NOTHING)) || !this.mapReducers.isEmpty()) { computedGraphRDD = SparkExecutor.prepareFinalGraphRDD(loadedGraphRDD, viewIncomingRDD, this.vertexProgram.getVertexComputeKeys()); assert null != computedGraphRDD && computedGraphRDD != loadedGraphRDD; } else { // ensure that the computedGraphRDD was not created assert null == computedGraphRDD; } } ///////////////// memory.complete(); // drop all transient memory keys // write the computed graph to the respective output (rdd or output format) if (null != outputRDD && !this.persist.equals(Persist.NOTHING)) { assert null != computedGraphRDD; // the logic holds that a computeGraphRDD must be created at this point outputRDD.writeGraphRDD(graphComputerConfiguration, computedGraphRDD); } } final boolean computedGraphCreated = computedGraphRDD != null && computedGraphRDD != loadedGraphRDD; if (!computedGraphCreated) computedGraphRDD = loadedGraphRDD; final Memory.Admin finalMemory = null == memory ? new MapMemory() : new MapMemory(memory); ////////////////////////////// // process the map reducers // ////////////////////////////// if (!this.mapReducers.isEmpty()) { // create a mapReduceRDD for executing the map reduce jobs on JavaPairRDD<Object, VertexWritable> mapReduceRDD = computedGraphRDD; if (computedGraphCreated && !outputToSpark) { // drop all the edges of the graph as they are not used in mapReduce processing mapReduceRDD = computedGraphRDD.mapValues(vertexWritable -> { vertexWritable.get().dropEdges(Direction.BOTH); return vertexWritable; }); // if there is only one MapReduce to execute, don't bother wasting the clock cycles. 
if (this.mapReducers.size() > 1) mapReduceRDD = mapReduceRDD.persist(StorageLevel.fromString(hadoopConfiguration.get(GREMLIN_SPARK_GRAPH_STORAGE_LEVEL, "MEMORY_ONLY"))); } for (final MapReduce mapReduce : this.mapReducers) { // execute the map reduce job final HadoopConfiguration newApacheConfiguration = new HadoopConfiguration(graphComputerConfiguration); mapReduce.storeState(newApacheConfiguration); // map final JavaPairRDD mapRDD = SparkExecutor.executeMap((JavaPairRDD) mapReduceRDD, mapReduce, newApacheConfiguration); // combine final JavaPairRDD combineRDD = mapReduce.doStage(MapReduce.Stage.COMBINE) ? SparkExecutor.executeCombine(mapRDD, newApacheConfiguration) : mapRDD; // reduce final JavaPairRDD reduceRDD = mapReduce.doStage(MapReduce.Stage.REDUCE) ? SparkExecutor.executeReduce(combineRDD, mapReduce, newApacheConfiguration) : combineRDD; // write the map reduce output back to disk and computer result memory if (null != outputRDD) mapReduce.addResultToMemory(finalMemory, outputRDD.writeMemoryRDD(graphComputerConfiguration, mapReduce.getMemoryKey(), reduceRDD)); } // if the mapReduceRDD is not simply the computed graph, unpersist the mapReduceRDD if (computedGraphCreated && !outputToSpark) { assert loadedGraphRDD != computedGraphRDD; assert mapReduceRDD != computedGraphRDD; mapReduceRDD.unpersist(); } else { assert mapReduceRDD == computedGraphRDD; } } // unpersist the loaded graph if it will not be used again (no PersistedInputRDD) // if the graphRDD was loaded from Spark, but then partitioned or filtered, its a different RDD if (!inputFromSpark || partitioned || filtered) loadedGraphRDD.unpersist(); // unpersist the computed graph if it will not be used again (no PersistedOutputRDD) // if the computed graph is the loadedGraphRDD because it was not mutated and not-unpersisted, then don't unpersist the computedGraphRDD/loadedGraphRDD if ((!outputToSpark || this.persist.equals(GraphComputer.Persist.NOTHING)) && computedGraphCreated) 
computedGraphRDD.unpersist(); // delete any file system or rdd data if persist nothing if (null != outputLocation && this.persist.equals(GraphComputer.Persist.NOTHING)) { if (outputToHDFS) fileSystemStorage.rm(outputLocation); if (outputToSpark) sparkContextStorage.rm(outputLocation); } // update runtime and return the newly computed graph finalMemory.setRuntime(System.currentTimeMillis() - startTime); // clear properties that should not be propagated in an OLAP chain graphComputerConfiguration.clearProperty(Constants.GREMLIN_HADOOP_GRAPH_FILTER); graphComputerConfiguration.clearProperty(Constants.GREMLIN_HADOOP_VERTEX_PROGRAM_INTERCEPTOR); graphComputerConfiguration.clearProperty(GREMLIN_SPARK_SKIP_GRAPH_CACHE); graphComputerConfiguration.clearProperty(GREMLIN_SPARK_SKIP_PARTITIONER); return new DefaultComputerResult(InputOutputHelper.getOutputGraph(graphComputerConfiguration, this.resultGraph, this.persist), finalMemory.asImmutable()); } finally { if (!graphComputerConfiguration.getBoolean(GREMLIN_SPARK_PERSIST_CONTEXT, false)) Spark.close(); } }); computerService.shutdown(); return result; } ///////////////// @Override protected void loadJar(final Configuration hadoopConfiguration, final File file, final Object... params) { final JavaSparkContext sparkContext = (JavaSparkContext) params[0]; sparkContext.addJar(file.getAbsolutePath()); } /** * When using a persistent context the running Context's configuration will override a passed * in configuration. Spark allows us to override these inherited properties via * SparkContext.setLocalProperty */ private void updateLocalConfiguration(final JavaSparkContext sparkContext, final Configuration configuration) { /* * While we could enumerate over the entire SparkConfiguration and copy into the Thread * Local properties of the Spark Context this could cause adverse effects with future * versions of Spark. 
Since the api for setting multiple local properties at once is * restricted as private, we will only set those properties we know can effect SparkGraphComputer * Execution rather than applying the entire configuration. */ final String[] validPropertyNames = { "spark.job.description", "spark.jobGroup.id", "spark.job.interruptOnCancel", "spark.scheduler.pool" }; for (String propertyName : validPropertyNames) { String propertyValue = configuration.get(propertyName); if (propertyValue != null) { this.logger.info("Setting Thread Local SparkContext Property - " + propertyName + " : " + propertyValue); sparkContext.setLocalProperty(propertyName, configuration.get(propertyName)); } } } public static void main(final String[] args) throws Exception { final Configurations configs = new Configurations(); final org.apache.commons.configuration2.Configuration configuration = configs.properties(args[0]); new SparkGraphComputer(HadoopGraph.open(configuration)).program(VertexProgram.createVertexProgram(HadoopGraph.open(configuration), configuration)).submit().get(); } }
googleapis/google-cloud-java
35,107
java-shopping-merchant-inventories/proto-google-shopping-merchant-inventories-v1beta/src/main/java/com/google/shopping/merchant/inventories/v1beta/ListRegionalInventoriesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/inventories/v1beta/regionalinventory.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.inventories.v1beta; /** * * * <pre> * Request message for the `ListRegionalInventories` method. * </pre> * * Protobuf type {@code google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest} */ public final class ListRegionalInventoriesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest) ListRegionalInventoriesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListRegionalInventoriesRequest.newBuilder() to construct. 
private ListRegionalInventoriesRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRegionalInventoriesRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRegionalInventoriesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.inventories.v1beta.RegionalInventoryProto .internal_static_google_shopping_merchant_inventories_v1beta_ListRegionalInventoriesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.inventories.v1beta.RegionalInventoryProto .internal_static_google_shopping_merchant_inventories_v1beta_ListRegionalInventoriesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest.class, com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest.Builder .class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The `name` of the parent product to list `RegionalInventory` * resources for. Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The `name` of the parent product to list `RegionalInventory` * resources for. Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * The maximum number of `RegionalInventory` resources for the given product * to return. The service returns fewer than this value if the number of * inventories for the given product is less that than the `pageSize`. The * default value is 25000. The maximum value is 100000; If a value higher than * the maximum is specified, then the `pageSize` will default to the maximum. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token, received from a previous `ListRegionalInventories` call. * Provide the page token to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRegionalInventories` * must match the call that provided the page token. The token returned as * [nextPageToken][google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesResponse.next_page_token] * in the response to the previous request. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. 
*/ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token, received from a previous `ListRegionalInventories` call. * Provide the page token to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRegionalInventories` * must match the call that provided the page token. The token returned as * [nextPageToken][google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesResponse.next_page_token] * in the response to the previous request. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public 
int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest)) { return super.equals(obj); } com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest other = (com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } 
public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `ListRegionalInventories` method. 
* </pre> * * Protobuf type {@code * google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest) com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.inventories.v1beta.RegionalInventoryProto .internal_static_google_shopping_merchant_inventories_v1beta_ListRegionalInventoriesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.inventories.v1beta.RegionalInventoryProto .internal_static_google_shopping_merchant_inventories_v1beta_ListRegionalInventoriesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest.class, com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest.Builder .class); } // Construct using // com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.inventories.v1beta.RegionalInventoryProto .internal_static_google_shopping_merchant_inventories_v1beta_ListRegionalInventoriesRequest_descriptor; } @java.lang.Override public com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest 
getDefaultInstanceForType() { return com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest .getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest build() { com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest buildPartial() { com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest result = new com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest) { return mergeFrom( (com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest other) { if (other == com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The `name` of the parent product to list `RegionalInventory` * resources for. Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The `name` of the parent product to list `RegionalInventory` * resources for. Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The `name` of the parent product to list `RegionalInventory` * resources for. Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `name` of the parent product to list `RegionalInventory` * resources for. Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The `name` of the parent product to list `RegionalInventory` * resources for. Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of `RegionalInventory` resources for the given product * to return. The service returns fewer than this value if the number of * inventories for the given product is less that than the `pageSize`. The * default value is 25000. The maximum value is 100000; If a value higher than * the maximum is specified, then the `pageSize` will default to the maximum. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of `RegionalInventory` resources for the given product * to return. 
The service returns fewer than this value if the number of * inventories for the given product is less that than the `pageSize`. The * default value is 25000. The maximum value is 100000; If a value higher than * the maximum is specified, then the `pageSize` will default to the maximum. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The maximum number of `RegionalInventory` resources for the given product * to return. The service returns fewer than this value if the number of * inventories for the given product is less that than the `pageSize`. The * default value is 25000. The maximum value is 100000; If a value higher than * the maximum is specified, then the `pageSize` will default to the maximum. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token, received from a previous `ListRegionalInventories` call. * Provide the page token to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRegionalInventories` * must match the call that provided the page token. The token returned as * [nextPageToken][google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesResponse.next_page_token] * in the response to the previous request. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token, received from a previous `ListRegionalInventories` call. * Provide the page token to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRegionalInventories` * must match the call that provided the page token. The token returned as * [nextPageToken][google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesResponse.next_page_token] * in the response to the previous request. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token, received from a previous `ListRegionalInventories` call. * Provide the page token to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRegionalInventories` * must match the call that provided the page token. The token returned as * [nextPageToken][google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesResponse.next_page_token] * in the response to the previous request. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A page token, received from a previous `ListRegionalInventories` call. * Provide the page token to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRegionalInventories` * must match the call that provided the page token. The token returned as * [nextPageToken][google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesResponse.next_page_token] * in the response to the previous request. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A page token, received from a previous `ListRegionalInventories` call. * Provide the page token to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRegionalInventories` * must match the call that provided the page token. The token returned as * [nextPageToken][google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesResponse.next_page_token] * in the response to the previous request. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest) private static final com.google.shopping.merchant.inventories.v1beta .ListRegionalInventoriesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest(); } public static com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRegionalInventoriesRequest> PARSER = new com.google.protobuf.AbstractParser<ListRegionalInventoriesRequest>() { @java.lang.Override public ListRegionalInventoriesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRegionalInventoriesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRegionalInventoriesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.inventories.v1beta.ListRegionalInventoriesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ---- file boundary marker (dataset concatenation artifact — not part of either source file) ----
// repo_id:   apache/commons-jexl
// size:      35,237
// file_path: src/main/java/org/apache/commons/jexl3/JexlFeatures.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.jexl3; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Objects; import java.util.Set; import java.util.TreeSet; import java.util.function.Predicate; /** * A set of language feature options. * <p> * These control <em>syntactical</em> constructs that will throw JexlException.Feature exceptions (a * subclass of JexlException.Parsing) when disabled. * </p> * <p>It is recommended to be explicit in choosing the features you need rather than rely on the default * constructor: the 2 convenience methods {@link JexlFeatures#createNone()} and {@link JexlFeatures#createAll()} * are the recommended starting points to selectively enable or disable chosen features.</p> * <ul> * <li>Registers: register syntax (#number), used internally for {g,s}etProperty * <li>Reserved Names: a set of reserved variable names that cannot be used as local variable (or parameter) names * <li>Global Side Effect : assigning/modifying values on global variables (=, += , -=, ...) 
* <li>Lexical: lexical scope, prevents redefining local variables * <li>Lexical Shade: local variables shade globals, prevents confusing a global variable with a local one * <li>Side Effect : assigning/modifying values on any variables or left-value * <li>Constant Array Reference: ensures array references only use constants;they should be statically solvable. * <li>New Instance: creating an instance using new(...) * <li>Loops: loop constructs (while(true), for(...)) * <li>Lambda: function definitions (()-&gt;{...}, function(...) ). * <li>Method calls: calling methods (obj.method(...) or obj['method'](...)); when disabled, leaves function calls * - including namespace prefixes - available * <li>Structured literals: arrays, lists, maps, sets, ranges * <li>Pragma: pragma construct as in {@code #pragma x y} * <li>Annotation: @annotation statement; * <li>Thin-arrow: use the thin-arrow, ie {@code ->} for lambdas as in {@code x -> x + x} * <li>Fat-arrow: use the fat-arrow, ie {@code =>} for lambdas as in {@code x => x + x} * <li>Namespace pragma: whether the {@code #pragma jexl.namespace.ns namespace} syntax is allowed</li> * <li>Namespace identifier: whether the {@code ns:fun(...)} parser treats the ns:fun as one identifier, no spaces allowed</li> * <li>Import pragma: whether the {@code #pragma jexl.import fully.qualified.class.name} syntax is allowed</li> * <li>Comparator names: whether the comparator operator names can be used (as in {@code gt} for &gt;, * {@code lt} for &lt;, ...)</li> * <li>Pragma anywhere: whether pragma, that are <em>not</em> statements and handled before execution begins, * can appear anywhere in the source or before any statements - ie at the beginning of a script.</li> * <li>Const Capture: whether variables captured by lambdas are read-only (aka const, same as Java) or read-write.</li> * <li>Reference Capture: whether variables captured by lambdas are pass-by-reference or pass-by-value.</li> * </ul> * @since 3.2 */ public final class 
JexlFeatures { /** The false predicate. */ public static final Predicate<String> TEST_STR_FALSE = s -> false; /** Te feature names (for toString()). */ private static final String[] F_NAMES = { "register", "reserved variable", "local variable", "assign/modify", "global assign/modify", "array reference", "create instance", "loop", "function", "method call", "set/map/array literal", "pragma", "annotation", "script", "lexical", "lexicalShade", "thin-arrow", "fat-arrow", "namespace pragma", "namespace identifier", "import pragma", "comparator names", "pragma anywhere", "const capture", "ref capture", "ambiguous statement" }; /** Registers feature ordinal. */ private static final int REGISTER = 0; /** Reserved future feature ordinal (unused as of 3.3.1). */ public static final int RESERVED = 1; /** Locals feature ordinal. */ public static final int LOCAL_VAR = 2; /** Side effects feature ordinal. */ public static final int SIDE_EFFECT = 3; /** Global side effects feature ordinal. */ public static final int SIDE_EFFECT_GLOBAL = 4; /** Expressions allowed in array reference ordinal. */ public static final int ARRAY_REF_EXPR = 5; /** New-instance feature ordinal. */ public static final int NEW_INSTANCE = 6; /** Loops feature ordinal. */ public static final int LOOP = 7; /** Lambda feature ordinal. */ public static final int LAMBDA = 8; /** Lambda feature ordinal. */ public static final int METHOD_CALL = 9; /** Structured literal feature ordinal. */ public static final int STRUCTURED_LITERAL = 10; /** Pragma feature ordinal. */ public static final int PRAGMA = 11; /** Annotation feature ordinal. */ public static final int ANNOTATION = 12; /** Script feature ordinal. */ public static final int SCRIPT = 13; /** Lexical feature ordinal. */ public static final int LEXICAL = 14; /** Lexical shade feature ordinal. */ public static final int LEXICAL_SHADE = 15; /** Thin-arrow lambda syntax. */ public static final int THIN_ARROW = 16; /** Fat-arrow lambda syntax. 
*/ public static final int FAT_ARROW = 17; /** Namespace pragma feature ordinal. */ public static final int NS_PRAGMA = 18; /** Namespace syntax as an identifier (no space). */ public static final int NS_IDENTIFIER = 19; /** Import pragma feature ordinal. */ public static final int IMPORT_PRAGMA = 20; /** Comparator names (legacy) syntax. */ public static final int COMPARATOR_NAMES = 21; /** The pragma anywhere feature ordinal. */ public static final int PRAGMA_ANYWHERE = 22; /** Captured variables are const. */ public static final int CONST_CAPTURE = 23; /** Captured variables are reference. */ public static final int REF_CAPTURE = 24; /** Ambiguous or strict statement allowed. */ public static final int AMBIGUOUS_STATEMENT = 25; /** * All features. * Ensure this is updated if additional features are added. */ private static final long ALL_FEATURES = (1L << AMBIGUOUS_STATEMENT + 1) - 1L; // MUST REMAIN PRIVATE /** * The default features flag mask. * <p>Meant for compatibility with scripts written before 3.3.1</p> */ private static final long DEFAULT_FEATURES = // MUST REMAIN PRIVATE 1L << LOCAL_VAR | 1L << SIDE_EFFECT | 1L << SIDE_EFFECT_GLOBAL | 1L << ARRAY_REF_EXPR | 1L << NEW_INSTANCE | 1L << LOOP | 1L << LAMBDA | 1L << METHOD_CALL | 1L << STRUCTURED_LITERAL | 1L << PRAGMA | 1L << ANNOTATION | 1L << SCRIPT | 1L << THIN_ARROW | 1L << NS_PRAGMA | 1L << IMPORT_PRAGMA | 1L << COMPARATOR_NAMES | 1L << PRAGMA_ANYWHERE; /** * The canonical scripting (since 3.3.1) features flag mask based on the original default. * <p>Adds lexical, lexical-shade and const-capture but removes comparator-names and pragma-anywhere</p> */ private static final long SCRIPT_FEATURES = // MUST REMAIN PRIVATE (DEFAULT_FEATURES | 1L << LEXICAL | 1L << LEXICAL_SHADE | 1L << CONST_CAPTURE) // these parentheses are necessary :-) & ~(1L << COMPARATOR_NAMES) & ~(1L << PRAGMA_ANYWHERE); /** * Protected future syntactic elements. 
* <p><em>class, jexl, $jexl</em></p> * @since 3.3.1 */ private static final Set<String> RESERVED_WORDS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("class", "jexl", "$jexl"))); /* * *WARNING* * Static fields may be inlined by the Java compiler, so their _values_ effectively form part of the external API. * Classes that reference them need to be recompiled to pick up new values. * This means that changes in value are not binary compatible. * Such fields must be private or problems may occur. */ /** * Creates an all features enabled set. * @return a new instance of all features set * @since 3.3.1 */ public static JexlFeatures createAll() { return new JexlFeatures(ALL_FEATURES, null, null); } /** * Creates a default features set suitable for basic but complete scripting needs. * <p>Maximizes compatibility with older version scripts (before 3.3), new projects should * use {@link JexlFeatures#createScript()} or equivalent features as a base.</p> * <p>The following scripting features are enabled:</p> * <ul> * <li>local variable, {@link JexlFeatures#supportsLocalVar()}</li> * <li>side effect, {@link JexlFeatures#supportsSideEffect()}</li> * <li>global side effect, {@link JexlFeatures#supportsSideEffectGlobal()}</li> * <li>array reference expression, {@link JexlFeatures#supportsStructuredLiteral()}</li> * <li>new instance, {@link JexlFeatures#supportsNewInstance()} </li> * <li>loop, {@link JexlFeatures#supportsLoops()}</li> * <li>lambda, {@link JexlFeatures#supportsLambda()}</li> * <li>method call, {@link JexlFeatures#supportsMethodCall()}</li> * <li>structured literal, {@link JexlFeatures#supportsStructuredLiteral()}</li> * <li>pragma, {@link JexlFeatures#supportsPragma()}</li> * <li>annotation, {@link JexlFeatures#supportsAnnotation()}</li> * <li>script, {@link JexlFeatures#supportsScript()}</li> * <li>comparator names, {@link JexlFeatures#supportsComparatorNames()}</li> * <li>namespace pragma, {@link JexlFeatures#supportsNamespacePragma()}</li> * <li>import 
pragma, {@link JexlFeatures#supportsImportPragma()}</li> * <li>pragma anywhere, {@link JexlFeatures#supportsPragmaAnywhere()}</li> * </ul> * @return a new instance of a default scripting features set * @since 3.3.1 */ public static JexlFeatures createDefault() { return new JexlFeatures(DEFAULT_FEATURES, null, null); } /** * Creates an empty feature set. * <p>This is the strictest base-set since no feature is allowed, suitable as-is only * for the simplest expressions.</p> * @return a new instance of an empty features set * @since 3.3.1 */ public static JexlFeatures createNone() { return new JexlFeatures(0L, null, null); } /** * The modern scripting features set. * <p>This is the recommended set for new projects.</p> * <p>All default features with the following differences:</p> * <ul> * <li><em>disable</em> pragma-anywhere, {@link JexlFeatures#supportsPragmaAnywhere()}</li> * <li><em>disable</em> comparator-names, {@link JexlFeatures#supportsComparatorNames()}</li> * <li><em>enable</em> lexical, {@link JexlFeatures#isLexical()}</li> * <li><em>enable</em> lexical-shade, {@link JexlFeatures#isLexicalShade()} </li> * <li><em>enable</em> const-capture, {@link JexlFeatures#supportsConstCapture()}</li> * </ul> * <p>It also adds a set of reserved words to enable future unencumbered syntax evolution: * <em>try, catch, throw, finally, switch, case, default, class, instanceof</em> * </p> * @return a new instance of a modern scripting features set * @since 3.3.1 */ public static JexlFeatures createScript() { return new JexlFeatures(SCRIPT_FEATURES, RESERVED_WORDS, null); } /** * The text corresponding to a feature code. * @param feature the feature number * @return the feature name */ public static String stringify(final int feature) { return feature >= 0 && feature < F_NAMES.length ? F_NAMES[feature] : "unsupported feature"; } /** The feature flags. 
*/ private long flags; /** The set of reserved names, aka global variables that cannot be masked by local variables or parameters. */ private Set<String> reservedNames; /** The namespace names. */ private Predicate<String> nameSpaces; /** * Creates default instance, equivalent to the result of calling the preferred alternative * {@link JexlFeatures#createDefault()} */ public JexlFeatures() { this(DEFAULT_FEATURES, null, null); } /** * Copy constructor. * @param features the feature to copy from */ public JexlFeatures(final JexlFeatures features) { this(features.flags, features.reservedNames, features.nameSpaces); } /** * An all member constructor for derivation. * <p>Not respecting immutability or thread-safety constraints for this class constructor arguments will * likely result in unexpected behavior.</p> * @param f flag * @param r reserved variable names; must be an immutable Set or thread-safe (concurrent or synchronized set) * @param n namespace predicate; must be stateless or thread-safe */ protected JexlFeatures(final long f, final Set<String> r, final Predicate<String> n) { this.flags = f; this.reservedNames = r == null? Collections.emptySet() : r; this.nameSpaces = n == null? TEST_STR_FALSE : n; } /** * Sets whether annotation constructs are enabled. * <p> * When disabled, parsing a script/expression using syntactic annotation constructs (@annotation) * will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures annotation(final boolean flag) { setFeature(ANNOTATION, flag); return this; } /** * Sets whether array references expressions are enabled. 
* <p> * When disabled, parsing a script/expression using 'obj[ ref ]' where ref is not a string or integer literal * will throw a parsing exception; * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures arrayReferenceExpr(final boolean flag) { setFeature(ARRAY_REF_EXPR, flag); return this; } /** * Sets whether the legacy comparison operator names syntax is enabled. * <p> * When disabled, comparison operators names (eq;ne;le;lt;ge;gt) * will be treated as plain identifiers. * </p> * @param flag true to enable, false to disable * @return this features instance * @since 3.3 */ public JexlFeatures comparatorNames(final boolean flag) { setFeature(COMPARATOR_NAMES, flag); return this; } /** * Sets whether lambda captured-variables are constant or mutable. * <p> * When disabled, lambda-captured variables are implicitly converted to read-write local variable (let), * when enabled, those are implicitly converted to read-only local variables (const). * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures constCapture(final boolean flag) { setFeature(CONST_CAPTURE, flag); return this; } /** * Sets whether lambda captured-variables are references or values. * <p>When variables are pass-by-reference, side effects are visible from inner lexical scopes * to outer-scope.</p> * <p> * When disabled, lambda-captured variables use pass-by-value semantic, * when enabled, those use pass-by-reference semantic. 
* </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures referenceCapture(final boolean flag) { setFeature(REF_CAPTURE, flag); return this; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final JexlFeatures other = (JexlFeatures) obj; if (this.flags != other.flags) { return false; } if (this.nameSpaces != other.nameSpaces) { return false; } if (!Objects.equals(this.reservedNames, other.reservedNames)) { return false; } return true; } /** * Sets whether fat-arrow lambda syntax is enabled. * <p> * When disabled, parsing a script/expression using syntactic fat-arrow (=&lt;) * will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance * @since 3.3 */ public JexlFeatures fatArrow(final boolean flag) { setFeature(FAT_ARROW, flag); return this; } /** * Gets a feature flag value. * @param feature feature ordinal * @return true if on, false if off */ private boolean getFeature(final int feature) { return (flags & 1L << feature) != 0L; } /** * Gets the feature flags * @return these features&quot;s flags */ public long getFlags() { return flags; } /** * Gets the immutable set of reserved names. * @return the (unmodifiable) set of reserved names. */ public Set<String> getReservedNames() { return reservedNames; } @Override public int hashCode() { //CSOFF: MagicNumber int hash = 3; hash = 53 * hash + (int) (this.flags ^ this.flags >>> 32); hash = 53 * hash + (this.reservedNames != null ? this.reservedNames.hashCode() : 0); return hash; } /** * Sets whether import pragma constructs are enabled. * <p> * When disabled, parsing a script/expression using syntactic import pragma constructs * (#pragma jexl.import....) will throw a parsing exception. 
* </p> * @param flag true to enable, false to disable * @return this features instance * @since 3.3 */ public JexlFeatures importPragma(final boolean flag) { setFeature(IMPORT_PRAGMA, flag); return this; } /** * Is the lexical scope feature enabled? * @return whether lexical scope feature is enabled */ public boolean isLexical() { return getFeature(LEXICAL); } /** * Is the lexical shade feature enabled? * @return whether lexical shade feature is enabled */ public boolean isLexicalShade() { return getFeature(LEXICAL_SHADE); } /** * Checks whether a name is reserved. * @param name the name to check * @return true if reserved, false otherwise */ public boolean isReservedName(final String name) { return name != null && reservedNames.contains(name); } /** * Sets whether lambda/function constructs are enabled. * <p> * When disabled, parsing a script/expression using syntactic lambda constructs (-&gt;,function) * will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures lambda(final boolean flag) { setFeature(LAMBDA, flag); return this; } /** * Sets whether syntactic lexical mode is enabled. * * @param flag true means syntactic lexical function scope is in effect, false implies non-lexical scoping * @return this features instance */ public JexlFeatures lexical(final boolean flag) { setFeature(LEXICAL, flag); if (!flag) { setFeature(LEXICAL_SHADE, false); } return this; } /** * Sets whether syntactic lexical shade is enabled. * * @param flag true means syntactic lexical shade is in effect and implies lexical scope * @return this features instance */ public JexlFeatures lexicalShade(final boolean flag) { setFeature(LEXICAL_SHADE, flag); if (flag) { setFeature(LEXICAL, true); } return this; } /** * Sets whether local variables are enabled. * <p> * When disabled, parsing a script/expression using a local variable or parameter syntax * will throw a parsing exception. 
* </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures localVar(final boolean flag) { setFeature(LOCAL_VAR, flag); return this; } /** * Sets whether looping constructs are enabled. * <p> * When disabled, parsing a script/expression using syntactic looping constructs (for,while) * will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures loops(final boolean flag) { setFeature(LOOP, flag); return this; } /** * Sets whether method calls expressions are enabled. * <p> * When disabled, parsing a script/expression using 'obj.method()' * will throw a parsing exception; * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures methodCall(final boolean flag) { setFeature(METHOD_CALL, flag); return this; } /** * Sets whether namespace pragma constructs are enabled. * <p> * When disabled, parsing a script/expression using syntactic namespace pragma constructs * (#pragma jexl.namespace....) will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance * @since 3.3 */ public JexlFeatures namespacePragma(final boolean flag) { setFeature(NS_PRAGMA, flag); return this; } /** * Sets whether namespace as identifier syntax is enabled. * <p> * When enabled, a namespace call must be of the form <code>ns:fun(...)</code> with no * spaces between the namespace name and the function. * </p> * @param flag true to enable, false to disable * @return this features instance * @since 3.5.0 */ public JexlFeatures namespaceIdentifier(final boolean flag) { setFeature(NS_IDENTIFIER, flag); return this; } /** * Gets the declared namespaces test. * @return the declared namespaces test. */ public Predicate<String> namespaceTest() { return nameSpaces; } /** * Sets a test to determine namespace declaration. 
* @param names the name predicate * @return this features instance */ public JexlFeatures namespaceTest(final Predicate<String> names) { nameSpaces = names == null ? TEST_STR_FALSE : names; return this; } /** * Sets whether creating new instances is enabled. * <p> * When disabled, parsing a script/expression using 'new(...)' will throw a parsing exception; * using a class as functor will fail at runtime. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures newInstance(final boolean flag) { setFeature(NEW_INSTANCE, flag); return this; } /** * Sets whether pragma constructs are enabled. * <p> * When disabled, parsing a script/expression using syntactic pragma constructs (#pragma) * will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures pragma(final boolean flag) { setFeature(PRAGMA, flag); if (!flag) { setFeature(NS_PRAGMA, false); setFeature(IMPORT_PRAGMA, false); } return this; } /** * Sets whether pragma constructs can appear anywhere in the code. * * @param flag true to enable, false to disable * @return this features instance * @since 3.3 */ public JexlFeatures pragmaAnywhere(final boolean flag) { setFeature(PRAGMA_ANYWHERE, flag); return this; } /** * Sets whether register are enabled. * <p> * This is mostly used internally during execution of JexlEngine.{g,s}etProperty. * </p> * <p> * When disabled, parsing a script/expression using the register syntax will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures register(final boolean flag) { setFeature(REGISTER, flag); return this; } /** * Sets a collection of reserved r precluding those to be used as local variables or parameter r. 
* @param names the r to reserve * @return this features instance */ public JexlFeatures reservedNames(final Collection<String> names) { if (names == null || names.isEmpty()) { reservedNames = Collections.emptySet(); } else { reservedNames = Collections.unmodifiableSet(new TreeSet<>(names)); } return this; } /** * Sets whether scripts constructs are enabled. * <p> * When disabled, parsing a script using syntactic script constructs (statements, ...) * will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures script(final boolean flag) { setFeature(SCRIPT, flag); return this; } /** * Sets a feature flag. * @param feature the feature ordinal * @param flag turn-on, turn off */ private void setFeature(final int feature, final boolean flag) { if (flag) { flags |= 1L << feature; } else { flags &= ~(1L << feature); } } /** * Sets whether statements can be ambiguous. * <p> * When enabled, the semicolumn is not required between expressions that otherwise are considered * ambiguous. The default will report ambiguity in cases like <code>if (true) { x 5 }</code> considering this * may be missing an operator or that the intent is not clear. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures ambiguousStatement(final boolean flag) { setFeature(AMBIGUOUS_STATEMENT, flag); return this; } /** * Checks whether statements can be ambiguous. * <p> * When enabled, the semicolumn is not required between expressions that otherwise are considered * ambiguous. The default will report ambiguity in cases like <code>if (true) { x 5 }</code> considering this * may be missing an operator or that the intent is not clear. * </p> * @return true if statements can be ambiguous, false otherwise */ public boolean supportsAmbiguousStatement() { boolean sas = getFeature(AMBIGUOUS_STATEMENT); return sas; } /** * Sets whether side effect expressions are enabled. 
* <p> * When disabled, parsing a script/expression using syntactical constructs modifying variables * or members will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures sideEffect(final boolean flag) { setFeature(SIDE_EFFECT, flag); return this; } /** * Sets whether side effect expressions on global variables (aka non-local) are enabled. * <p> * When disabled, parsing a script/expression using syntactical constructs modifying variables * <em>including all potentially ant-ish variables</em> will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures sideEffectGlobal(final boolean flag) { setFeature(SIDE_EFFECT_GLOBAL, flag); return this; } /** * Sets whether array/map/set literal expressions are enabled. * <p> * When disabled, parsing a script/expression creating one of these literals * will throw a parsing exception; * </p> * @param flag true to enable, false to disable * @return this features instance */ public JexlFeatures structuredLiteral(final boolean flag) { setFeature(STRUCTURED_LITERAL, flag); return this; } /** * Does the engine support annotations? * @return true if annotation are enabled, false otherwise */ public boolean supportsAnnotation() { return getFeature(ANNOTATION); } /** * Does the engine support array references which contain method call expressions? * @return true if array references can contain method call expressions, false otherwise */ public boolean supportsArrayReferenceExpr() { return getFeature(ARRAY_REF_EXPR); } /** * Does the engine support legacy comparison operator names syntax? * @return true if legacy comparison operator names syntax is enabled, false otherwise * @since 3.3 */ public boolean supportsComparatorNames() { return getFeature(COMPARATOR_NAMES); } /** * Does the engine support lambda captured-variables as const? 
* @return true if lambda captured-variables are const, false otherwise */ public boolean supportsConstCapture() { return getFeature(CONST_CAPTURE); } /** * Does the engine support lambda captured-variables as references? * @return true if lambda captured-variables are references, false otherwise */ public boolean supportsReferenceCapture() { return getFeature(REF_CAPTURE); } /** * Does the engine support expressions (aka not scripts) * @return true if expressions (aka not scripts) are enabled, false otherwise */ public boolean supportsExpression() { return !getFeature(SCRIPT); } /** * Does the engine support fat-arrow lambda syntax? * @return true if fat-arrow lambda syntax is enabled, false otherwise * @since 3.3 */ public boolean supportsFatArrow() { return getFeature(FAT_ARROW); } /** * Does the engine support import pragma? * @return true if import pragma are enabled, false otherwise * @since 3.3 */ public boolean supportsImportPragma() { return getFeature(IMPORT_PRAGMA); } /** * Does the engine support lambdas? * @return true if lambda are enabled, false otherwise */ public boolean supportsLambda() { return getFeature(LAMBDA); } /** * Is local variables syntax enabled? * @return true if local variables syntax is enabled */ public boolean supportsLocalVar() { return getFeature(LOCAL_VAR); } /** * Are loops enabled? * @return true if loops are enabled, false otherwise */ public boolean supportsLoops() { return getFeature(LOOP); } /** * Can array references contain expressions? * @return true if array references can contain expressions, false otherwise */ public boolean supportsMethodCall() { return getFeature(METHOD_CALL); } /** * Is namespace pragma enabled? * @return true if namespace pragma are enabled, false otherwise * @since 3.3 */ public boolean supportsNamespacePragma() { return getFeature(NS_PRAGMA); } /** * Is namespace identifier syntax enabled? 
* @return true if namespace identifier syntax is enabled, false otherwise * @since 3.5.0 */ public boolean supportsNamespaceIdentifier() { return getFeature(NS_IDENTIFIER); } /** * Is creating new instances enabled? * @return true if creating new instances is enabled, false otherwise */ public boolean supportsNewInstance() { return getFeature(NEW_INSTANCE); } /** * Is the namespace pragma enabled? * @return true if namespace pragma are enabled, false otherwise */ public boolean supportsPragma() { return getFeature(PRAGMA); } /** * Can pragma constructs appear anywhere in the code? * @return true if pragma constructs can appear anywhere in the code, false otherwise * @since 3.3 */ public boolean supportsPragmaAnywhere() { return getFeature(PRAGMA_ANYWHERE); } /** * Is register syntax enabled? * @return true if register syntax is enabled */ public boolean supportsRegister() { return getFeature(REGISTER); } /** * Are scripts enabled? * @return true if scripts are enabled, false otherwise */ public boolean supportsScript() { return getFeature(SCRIPT); } /** * Are side effects enabled? * @return true if side effects are enabled, false otherwise */ public boolean supportsSideEffect() { return getFeature(SIDE_EFFECT); } /** * Can global variables be assigned? * @return true if global variables can be assigned */ public boolean supportsSideEffectGlobal() { return getFeature(SIDE_EFFECT_GLOBAL); } /** * Are array/map/set literal expressions supported? * @return true if array/map/set literal expressions are supported, false otherwise */ public boolean supportsStructuredLiteral() { return getFeature(STRUCTURED_LITERAL); } /** * Is thin-arrow lambda syntax enabled? * @return true if thin-arrow lambda syntax is enabled, false otherwise * @since 3.3 */ public boolean supportsThinArrow() { return getFeature(THIN_ARROW); } /** * Sets whether thin-arrow lambda syntax is enabled. 
* <p> * When disabled, parsing a script/expression using syntactic thin-arrow (-&lt;) * will throw a parsing exception. * </p> * @param flag true to enable, false to disable * @return this features instance * @since 3.3 */ public JexlFeatures thinArrow(final boolean flag) { setFeature(THIN_ARROW, flag); return this; } }
google/nomulus
35,163
core/src/test/java/google/registry/flows/domain/DomainRestoreRequestFlowTest.java
// Copyright 2017 The Nomulus Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package google.registry.flows.domain; import static com.google.common.truth.Truth.assertThat; import static google.registry.testing.DatabaseHelper.assertBillingEvents; import static google.registry.testing.DatabaseHelper.assertDomainDnsRequests; import static google.registry.testing.DatabaseHelper.assertPollMessages; import static google.registry.testing.DatabaseHelper.createTld; import static google.registry.testing.DatabaseHelper.getOnlyHistoryEntryOfType; import static google.registry.testing.DatabaseHelper.getPollMessages; import static google.registry.testing.DatabaseHelper.loadByKey; import static google.registry.testing.DatabaseHelper.loadRegistrar; import static google.registry.testing.DatabaseHelper.persistActiveDomain; import static google.registry.testing.DatabaseHelper.persistDeletedDomain; import static google.registry.testing.DatabaseHelper.persistReservedList; import static google.registry.testing.DatabaseHelper.persistResource; import static google.registry.testing.DomainSubject.assertAboutDomains; import static google.registry.testing.EppExceptionSubject.assertAboutEppExceptions; import static google.registry.util.DateTimeUtils.END_OF_TIME; import static google.registry.util.DateTimeUtils.START_OF_TIME; import static org.joda.money.CurrencyUnit.EUR; import static org.joda.money.CurrencyUnit.JPY; import static org.joda.money.CurrencyUnit.USD; 
import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import google.registry.flows.EppException; import google.registry.flows.EppException.UnimplementedExtensionException; import google.registry.flows.FlowUtils.NotLoggedInException; import google.registry.flows.FlowUtils.UnknownCurrencyEppException; import google.registry.flows.ResourceFlowTestCase; import google.registry.flows.ResourceFlowUtils.ResourceDoesNotExistException; import google.registry.flows.ResourceFlowUtils.ResourceNotOwnedException; import google.registry.flows.domain.DomainFlowUtils.CurrencyUnitMismatchException; import google.registry.flows.domain.DomainFlowUtils.CurrencyValueScaleException; import google.registry.flows.domain.DomainFlowUtils.DomainReservedException; import google.registry.flows.domain.DomainFlowUtils.FeesMismatchException; import google.registry.flows.domain.DomainFlowUtils.FeesRequiredForPremiumNameException; import google.registry.flows.domain.DomainFlowUtils.MissingBillingAccountMapException; import google.registry.flows.domain.DomainFlowUtils.NotAuthorizedForTldException; import google.registry.flows.domain.DomainFlowUtils.PremiumNameBlockedException; import google.registry.flows.domain.DomainFlowUtils.RegistrarMustBeActiveForThisOperationException; import google.registry.flows.domain.DomainFlowUtils.UnsupportedFeeAttributeException; import google.registry.flows.domain.DomainRestoreRequestFlow.DomainNotEligibleForRestoreException; import google.registry.flows.domain.DomainRestoreRequestFlow.RestoreCommandIncludesChangesException; import google.registry.model.billing.BillingBase.Flag; import google.registry.model.billing.BillingBase.Reason; import google.registry.model.billing.BillingEvent; import google.registry.model.billing.BillingRecurrence; import google.registry.model.domain.Domain; import 
google.registry.model.domain.DomainHistory; import google.registry.model.domain.GracePeriod; import google.registry.model.domain.rgp.GracePeriodStatus; import google.registry.model.eppcommon.StatusValue; import google.registry.model.poll.PollMessage; import google.registry.model.registrar.Registrar; import google.registry.model.registrar.Registrar.State; import google.registry.model.reporting.DomainTransactionRecord; import google.registry.model.reporting.DomainTransactionRecord.TransactionReportField; import google.registry.model.reporting.HistoryEntry; import google.registry.model.tld.Tld; import google.registry.persistence.VKey; import google.registry.testing.DatabaseHelper; import java.util.Map; import java.util.Optional; import org.joda.money.Money; import org.joda.time.DateTime; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; /** Unit tests for {@link DomainRestoreRequestFlow}. */ class DomainRestoreRequestFlowTest extends ResourceFlowTestCase<DomainRestoreRequestFlow, Domain> { private static final ImmutableMap<String, String> FEE_06_MAP = ImmutableMap.of("FEE_VERSION", "0.6", "FEE_NS", "fee", "CURRENCY", "USD"); private static final ImmutableMap<String, String> FEE_11_MAP = ImmutableMap.of("FEE_VERSION", "0.11", "FEE_NS", "fee11", "CURRENCY", "USD"); private static final ImmutableMap<String, String> FEE_12_MAP = ImmutableMap.of("FEE_VERSION", "0.12", "FEE_NS", "fee12", "CURRENCY", "USD"); @BeforeEach void initDomainTest() { createTld("tld"); persistResource( loadRegistrar("TheRegistrar") .asBuilder() .setBillingAccountMap(ImmutableMap.of(USD, "123", EUR, "567")) .build()); setEppInput("domain_update_restore_request.xml", ImmutableMap.of("DOMAIN", "example.tld")); } Domain persistPendingDeleteDomain() throws Exception { // The domain is now past what had been its expiration date at the time of deletion. 
return persistPendingDeleteDomain(clock.nowUtc().minusDays(5)); } Domain persistPendingDeleteDomain(DateTime expirationTime) throws Exception { Domain domain = persistResource(DatabaseHelper.newDomain(getUniqueIdFromCommand())); HistoryEntry historyEntry = persistResource( new DomainHistory.Builder() .setType(HistoryEntry.Type.DOMAIN_DELETE) .setModificationTime(clock.nowUtc()) .setRegistrarId(domain.getCurrentSponsorRegistrarId()) .setDomain(domain) .build()); domain = persistResource( domain .asBuilder() .setRegistrationExpirationTime(expirationTime) .setDeletionTime(clock.nowUtc().plusDays(35)) .addGracePeriod( GracePeriod.create( GracePeriodStatus.REDEMPTION, domain.getRepoId(), clock.nowUtc().plusDays(1), "TheRegistrar", null)) .setStatusValues(ImmutableSet.of(StatusValue.PENDING_DELETE)) .setDeletePollMessage( persistResource( new PollMessage.OneTime.Builder() .setRegistrarId("TheRegistrar") .setEventTime(clock.nowUtc().plusDays(5)) .setHistoryEntry(historyEntry) .build()) .createVKey()) .build()); clock.advanceOneMilli(); return domain; } @Test void testNotLoggedIn() { sessionMetadata.setRegistrarId(null); EppException thrown = assertThrows(NotLoggedInException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testDryRun() throws Exception { setEppInput("domain_update_restore_request.xml", ImmutableMap.of("DOMAIN", "example.tld")); persistPendingDeleteDomain(); dryRunFlowAssertResponse(loadFile("generic_success_response.xml")); } @Test void testSuccess_expiryStillInFuture_notExtended() throws Exception { setEppInput("domain_update_restore_request.xml", ImmutableMap.of("DOMAIN", "example.tld")); DateTime expirationTime = clock.nowUtc().plusYears(5).plusDays(45); persistPendingDeleteDomain(expirationTime); assertMutatingFlow(true); // Double check that we see a poll message in the future for when the delete happens. 
assertThat(getPollMessages("TheRegistrar", clock.nowUtc().plusMonths(1))).hasSize(1); runFlowAssertResponse(loadFile("generic_success_response.xml")); Domain domain = reloadResourceByForeignKey(); DomainHistory historyEntryDomainRestore = getOnlyHistoryEntryOfType(domain, HistoryEntry.Type.DOMAIN_RESTORE, DomainHistory.class); assertLastHistoryContainsResource(domain); assertThat(loadByKey(domain.getAutorenewBillingEvent()).getEventTime()) .isEqualTo(expirationTime); assertAboutDomains() .that(domain) // New expiration time should be the same as from before the deletion. .hasRegistrationExpirationTime(expirationTime) .and() .doesNotHaveStatusValue(StatusValue.PENDING_DELETE) .and() .hasDeletionTime(END_OF_TIME) .and() .hasOneHistoryEntryEachOfTypes( HistoryEntry.Type.DOMAIN_DELETE, HistoryEntry.Type.DOMAIN_RESTORE) .and() .hasLastEppUpdateTime(clock.nowUtc()) .and() .hasLastEppUpdateRegistrarId("TheRegistrar"); assertThat(domain.getGracePeriods()).isEmpty(); assertDomainDnsRequests("example.tld"); // The poll message for the delete should now be gone. The only poll message should be the new // autorenew poll message. assertPollMessages( "TheRegistrar", new PollMessage.Autorenew.Builder() .setTargetId("example.tld") .setRegistrarId("TheRegistrar") .setEventTime(domain.getRegistrationExpirationTime()) .setAutorenewEndTime(END_OF_TIME) .setMsg("Domain was auto-renewed.") .setHistoryEntry(historyEntryDomainRestore) .build()); // There should be a onetime for the restore and a new recurrence, but no renew onetime. 
assertBillingEvents( new BillingRecurrence.Builder() .setReason(Reason.RENEW) .setFlags(ImmutableSet.of(Flag.AUTO_RENEW)) .setTargetId("example.tld") .setRegistrarId("TheRegistrar") .setEventTime(expirationTime) .setRecurrenceEndTime(END_OF_TIME) .setDomainHistory(historyEntryDomainRestore) .build(), new BillingEvent.Builder() .setReason(Reason.RESTORE) .setTargetId("example.tld") .setRegistrarId("TheRegistrar") .setCost(Money.of(USD, 17)) .setPeriodYears(1) .setEventTime(clock.nowUtc()) .setBillingTime(clock.nowUtc()) .setDomainHistory(historyEntryDomainRestore) .build()); } @Test void testSuccess_expiryInPast_extendedByOneYear() throws Exception { setEppInput("domain_update_restore_request.xml", ImmutableMap.of("DOMAIN", "example.tld")); DateTime expirationTime = clock.nowUtc().minusDays(20); DateTime newExpirationTime = expirationTime.plusYears(1); persistPendingDeleteDomain(expirationTime); assertMutatingFlow(true); // Double check that we see a poll message in the future for when the delete happens. assertThat(getPollMessages("TheRegistrar", clock.nowUtc().plusMonths(1))).hasSize(1); runFlowAssertResponse(loadFile("generic_success_response.xml")); Domain domain = reloadResourceByForeignKey(); DomainHistory historyEntryDomainRestore = getOnlyHistoryEntryOfType(domain, HistoryEntry.Type.DOMAIN_RESTORE, DomainHistory.class); assertLastHistoryContainsResource(domain); assertThat(loadByKey(domain.getAutorenewBillingEvent()).getEventTime()) .isEqualTo(newExpirationTime); assertAboutDomains() .that(domain) // New expiration time should be exactly a year from now. 
.hasRegistrationExpirationTime(newExpirationTime) .and() .doesNotHaveStatusValue(StatusValue.PENDING_DELETE) .and() .hasDeletionTime(END_OF_TIME) .and() .hasOneHistoryEntryEachOfTypes( HistoryEntry.Type.DOMAIN_DELETE, HistoryEntry.Type.DOMAIN_RESTORE) .and() .hasLastEppUpdateTime(clock.nowUtc()) .and() .hasLastEppUpdateRegistrarId("TheRegistrar"); assertThat(domain.getGracePeriods()).isEmpty(); assertDomainDnsRequests("example.tld"); // The poll message for the delete should now be gone. The only poll message should be the new // autorenew poll message. assertPollMessages( "TheRegistrar", new PollMessage.Autorenew.Builder() .setTargetId("example.tld") .setRegistrarId("TheRegistrar") .setEventTime(domain.getRegistrationExpirationTime()) .setAutorenewEndTime(END_OF_TIME) .setMsg("Domain was auto-renewed.") .setHistoryEntry(historyEntryDomainRestore) .build()); // There should be a bill for the restore and an explicit renew, along with a new recurrence // autorenew event. assertBillingEvents( new BillingRecurrence.Builder() .setReason(Reason.RENEW) .setFlags(ImmutableSet.of(Flag.AUTO_RENEW)) .setTargetId("example.tld") .setRegistrarId("TheRegistrar") .setEventTime(newExpirationTime) .setRecurrenceEndTime(END_OF_TIME) .setDomainHistory(historyEntryDomainRestore) .build(), new BillingEvent.Builder() .setReason(Reason.RESTORE) .setTargetId("example.tld") .setRegistrarId("TheRegistrar") .setCost(Money.of(USD, 17)) .setPeriodYears(1) .setEventTime(clock.nowUtc()) .setBillingTime(clock.nowUtc()) .setDomainHistory(historyEntryDomainRestore) .build(), new BillingEvent.Builder() .setReason(Reason.RENEW) .setTargetId("example.tld") .setRegistrarId("TheRegistrar") .setCost(Money.of(USD, 11)) .setPeriodYears(1) .setEventTime(clock.nowUtc()) .setBillingTime(clock.nowUtc()) .setDomainHistory(historyEntryDomainRestore) .build()); } @Test void testSuccess_autorenewEndTimeIsCleared() throws Exception { setEppInput("domain_update_restore_request_fee.xml", FEE_06_MAP); 
persistPendingDeleteDomain(); persistResource( reloadResourceByForeignKey() .asBuilder() .setAutorenewEndTime(Optional.of(clock.nowUtc().plusYears(2))) .build()); assertThat(reloadResourceByForeignKey().getAutorenewEndTime()).isPresent(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_fee.xml", FEE_06_MAP)); assertThat(reloadResourceByForeignKey().getAutorenewEndTime()).isEmpty(); } @Test void testSuccess_fee_v06() throws Exception { setEppInput("domain_update_restore_request_fee.xml", FEE_06_MAP); persistPendingDeleteDomain(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_fee.xml", FEE_06_MAP)); } @Test void testSuccess_fee_v06_noRenewal() throws Exception { setEppInput("domain_update_restore_request_fee_no_renewal.xml", FEE_06_MAP); persistPendingDeleteDomain(clock.nowUtc().plusMonths(6)); runFlowAssertResponse( loadFile("domain_update_restore_request_response_fee_no_renewal.xml", FEE_06_MAP)); } @Test void testSuccess_fee_v11() throws Exception { setEppInput("domain_update_restore_request_fee.xml", FEE_11_MAP); persistPendingDeleteDomain(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_fee.xml", FEE_11_MAP)); } @Test void testSuccess_fee_v12() throws Exception { setEppInput("domain_update_restore_request_fee.xml", FEE_12_MAP); persistPendingDeleteDomain(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_fee.xml", FEE_12_MAP)); } @Test void testSuccess_fee_withDefaultAttributes_v06() throws Exception { setEppInput("domain_update_restore_request_fee_defaults.xml", FEE_06_MAP); persistPendingDeleteDomain(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_fee.xml", FEE_06_MAP)); } @Test void testSuccess_fee_withDefaultAttributes_v11() throws Exception { setEppInput("domain_update_restore_request_fee_defaults.xml", FEE_11_MAP); persistPendingDeleteDomain(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_fee.xml", 
FEE_11_MAP)); } @Test void testSuccess_fee_withDefaultAttributes_v12() throws Exception { setEppInput("domain_update_restore_request_fee_defaults.xml", FEE_12_MAP); persistPendingDeleteDomain(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_fee.xml", FEE_12_MAP)); } @Test void testFailure_fee_unknownCurrency() { ImmutableMap<String, String> substitutions = ImmutableMap.of("FEE_VERSION", "0.12", "FEE_NS", "fee12", "CURRENCY", "BAD"); setEppInput("domain_update_restore_request_fee.xml", substitutions); EppException thrown = assertThrows(UnknownCurrencyEppException.class, this::persistPendingDeleteDomain); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_refundableFee_v06() throws Exception { setEppInput("domain_update_restore_request_fee_refundable.xml", FEE_06_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_refundableFee_v11() throws Exception { setEppInput("domain_update_restore_request_fee_refundable.xml", FEE_11_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_refundableFee_v12() throws Exception { setEppInput("domain_update_restore_request_fee_refundable.xml", FEE_12_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_gracePeriodFee_v06() throws Exception { setEppInput("domain_update_restore_request_fee_grace_period.xml", FEE_06_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void 
testFailure_gracePeriodFee_v11() throws Exception { setEppInput("domain_update_restore_request_fee_grace_period.xml", FEE_11_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_gracePeriodFee_v12() throws Exception { setEppInput("domain_update_restore_request_fee_grace_period.xml", FEE_12_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_appliedFee_v06() throws Exception { setEppInput("domain_update_restore_request_fee_applied.xml", FEE_06_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_appliedFee_v11() throws Exception { setEppInput("domain_update_restore_request_fee_applied.xml", FEE_11_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_appliedFee_v12() throws Exception { setEppInput("domain_update_restore_request_fee_applied.xml", FEE_12_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(UnsupportedFeeAttributeException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testSuccess_premiumNotBlocked() throws Exception { createTld("example"); setEppInput("domain_update_restore_request_premium.xml"); persistPendingDeleteDomain(); runFlowAssertResponse(loadFile("domain_update_restore_request_response_premium.xml")); } @Test void testSuccess_premiumNotBlocked_andNoRenewal() throws Exception { createTld("example"); setEppInput("domain_update_restore_request_premium_no_renewal.xml"); 
persistPendingDeleteDomain(clock.nowUtc().plusYears(2)); runFlowAssertResponse( loadFile("domain_update_restore_request_response_fee_no_renewal.xml", FEE_12_MAP)); } @Test void testSuccess_superuserOverridesReservedList() throws Exception { persistResource( Tld.get("tld") .asBuilder() .setReservedLists(persistReservedList("tld-reserved", "example,FULLY_BLOCKED")) .build()); persistPendingDeleteDomain(); runFlowAssertResponse( CommitMode.LIVE, UserPrivileges.SUPERUSER, loadFile("generic_success_response.xml")); } @Test void testSuccess_superuserOverridesPremiumNameBlock() throws Exception { createTld("example"); setEppInput("domain_update_restore_request_premium.xml"); persistPendingDeleteDomain(); // Modify the Registrar to block premium names. persistResource(loadRegistrar("TheRegistrar").asBuilder().setBlockPremiumNames(true).build()); runFlowAssertResponse( CommitMode.LIVE, UserPrivileges.SUPERUSER, loadFile("domain_update_restore_request_response_premium.xml")); } @Test void testSuccess_worksWithoutPollMessage() throws Exception { Domain domain = persistPendingDeleteDomain(); VKey<PollMessage.OneTime> deletePollMessage = domain.getDeletePollMessage(); persistResource(domain.asBuilder().setDeletePollMessage(null).build()); DatabaseHelper.deleteByKey(deletePollMessage); runFlowAssertResponse(loadFile("generic_success_response.xml")); } @Test void testFailure_doesNotExist() throws Exception { ResourceDoesNotExistException thrown = assertThrows(ResourceDoesNotExistException.class, this::runFlow); assertThat(thrown).hasMessageThat().contains(String.format("(%s)", getUniqueIdFromCommand())); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_suspendedRegistrarCantRestoreDomain() { persistResource( Registrar.loadByRegistrarId("TheRegistrar") .get() .asBuilder() .setState(State.SUSPENDED) .build()); EppException thrown = assertThrows(RegistrarMustBeActiveForThisOperationException.class, this::runFlow); 
assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_pendingRegistrarCantRestoreDomain() { persistResource( Registrar.loadByRegistrarId("TheRegistrar") .get() .asBuilder() .setState(State.PENDING) .build()); EppException thrown = assertThrows(RegistrarMustBeActiveForThisOperationException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_wrongFeeAmount_v06() throws Exception { setEppInput("domain_update_restore_request_fee.xml", FEE_06_MAP); persistPendingDeleteDomain(); persistResource(Tld.get("tld").asBuilder().setRestoreBillingCost(Money.of(USD, 100)).build()); EppException thrown = assertThrows(FeesMismatchException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_wrongFeeAmount_v11() throws Exception { setEppInput("domain_update_restore_request_fee.xml", FEE_11_MAP); persistPendingDeleteDomain(); persistResource(Tld.get("tld").asBuilder().setRestoreBillingCost(Money.of(USD, 100)).build()); EppException thrown = assertThrows(FeesMismatchException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_wrongFeeAmount_v12() throws Exception { setEppInput("domain_update_restore_request_fee.xml", FEE_12_MAP); persistPendingDeleteDomain(); persistResource(Tld.get("tld").asBuilder().setRestoreBillingCost(Money.of(USD, 100)).build()); EppException thrown = assertThrows(FeesMismatchException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } private void runWrongCurrencyTest(Map<String, String> substitutions) throws Exception { setEppInput("domain_update_restore_request_fee.xml", substitutions); persistPendingDeleteDomain(); persistResource( Tld.get("tld") .asBuilder() .setCurrency(EUR) .setCreateBillingCostTransitions( ImmutableSortedMap.of(START_OF_TIME, Money.of(EUR, 13))) .setRestoreBillingCost(Money.of(EUR, 11)) 
.setRenewBillingCostTransitions(ImmutableSortedMap.of(START_OF_TIME, Money.of(EUR, 7))) .setEapFeeSchedule(ImmutableSortedMap.of(START_OF_TIME, Money.zero(EUR))) .setServerStatusChangeBillingCost(Money.of(EUR, 19)) .setRegistryLockOrUnlockBillingCost(Money.of(EUR, 0)) .build()); EppException thrown = assertThrows(CurrencyUnitMismatchException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_wrongCurrency_v06() throws Exception { runWrongCurrencyTest(FEE_06_MAP); } @Test void testFailure_wrongCurrency_v11() throws Exception { runWrongCurrencyTest(FEE_11_MAP); } @Test void testFailure_wrongCurrency_v12() throws Exception { runWrongCurrencyTest(FEE_12_MAP); } @Test void testFailure_feeGivenInWrongScale_v06() throws Exception { setEppInput("domain_update_restore_request_fee_bad_scale.xml", FEE_06_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(CurrencyValueScaleException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_feeGivenInWrongScale_v11() throws Exception { setEppInput("domain_update_restore_request_fee_bad_scale.xml", FEE_11_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(CurrencyValueScaleException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_feeGivenInWrongScale_v12() throws Exception { setEppInput("domain_update_restore_request_fee_bad_scale.xml", FEE_12_MAP); persistPendingDeleteDomain(); EppException thrown = assertThrows(CurrencyValueScaleException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_notInRedemptionPeriod() throws Exception { persistResource( DatabaseHelper.newDomain(getUniqueIdFromCommand()) .asBuilder() .setDeletionTime(clock.nowUtc().plusDays(4)) .setStatusValues(ImmutableSet.of(StatusValue.PENDING_DELETE)) .build()); EppException thrown = 
assertThrows(DomainNotEligibleForRestoreException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_notDeleted() throws Exception { persistActiveDomain(getUniqueIdFromCommand()); EppException thrown = assertThrows(DomainNotEligibleForRestoreException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_fullyDeleted() throws Exception { persistDeletedDomain(getUniqueIdFromCommand(), clock.nowUtc().minusDays(1)); EppException thrown = assertThrows(ResourceDoesNotExistException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_withChange() throws Exception { persistPendingDeleteDomain(); setEppInput("domain_update_restore_request_with_change.xml"); EppException thrown = assertThrows(RestoreCommandIncludesChangesException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_withAdd() throws Exception { persistPendingDeleteDomain(); setEppInput("domain_update_restore_request_with_add.xml"); EppException thrown = assertThrows(RestoreCommandIncludesChangesException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_withRemove() throws Exception { persistPendingDeleteDomain(); setEppInput("domain_update_restore_request_with_remove.xml"); EppException thrown = assertThrows(RestoreCommandIncludesChangesException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_withSecDnsExtension() throws Exception { persistPendingDeleteDomain(); setEppInput("domain_update_restore_request_with_secdns.xml"); EppException thrown = assertThrows(UnimplementedExtensionException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_unauthorizedClient() throws Exception { sessionMetadata.setRegistrarId("NewRegistrar"); 
persistPendingDeleteDomain(); EppException thrown = assertThrows(ResourceNotOwnedException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testSuccess_superuserUnauthorizedClient() throws Exception { sessionMetadata.setRegistrarId("NewRegistrar"); persistPendingDeleteDomain(); EppException thrown = assertThrows( ResourceNotOwnedException.class, () -> runFlowAssertResponse(loadFile("generic_success_response.xml"))); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_notAuthorizedForTld() throws Exception { persistResource( loadRegistrar("TheRegistrar").asBuilder().setAllowedTlds(ImmutableSet.of()).build()); persistPendingDeleteDomain(); EppException thrown = assertThrows(NotAuthorizedForTldException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_missingBillingAccount() throws Exception { persistPendingDeleteDomain(); persistResource( Tld.get("tld") .asBuilder() .setCurrency(JPY) .setCreateBillingCostTransitions( ImmutableSortedMap.of(START_OF_TIME, Money.ofMajor(JPY, 800))) .setEapFeeSchedule(ImmutableSortedMap.of(START_OF_TIME, Money.ofMajor(JPY, 800))) .setRenewBillingCostTransitions( ImmutableSortedMap.of(START_OF_TIME, Money.ofMajor(JPY, 800))) .setRegistryLockOrUnlockBillingCost(Money.ofMajor(JPY, 800)) .setServerStatusChangeBillingCost(Money.ofMajor(JPY, 800)) .setRestoreBillingCost(Money.ofMajor(JPY, 800)) .build()); EppException thrown = assertThrows(MissingBillingAccountMapException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testSuccess_superuserNotAuthorizedForTld() throws Exception { persistResource( loadRegistrar("TheRegistrar").asBuilder().setAllowedTlds(ImmutableSet.of()).build()); persistPendingDeleteDomain(); runFlowAssertResponse( CommitMode.LIVE, UserPrivileges.SUPERUSER, loadFile("generic_success_response.xml")); } @Test void testFailure_premiumBlocked() throws 
Exception { createTld("example"); setEppInput("domain_update_restore_request_premium.xml"); persistPendingDeleteDomain(); // Modify the Registrar to block premium names. persistResource(loadRegistrar("TheRegistrar").asBuilder().setBlockPremiumNames(true).build()); EppException thrown = assertThrows(PremiumNameBlockedException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_reservedBlocked() throws Exception { createTld("tld"); persistResource( Tld.get("tld") .asBuilder() .setReservedLists(persistReservedList("tld-reserved", "example,FULLY_BLOCKED")) .build()); persistPendingDeleteDomain(); EppException thrown = assertThrows(DomainReservedException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testFailure_premiumNotAcked() throws Exception { createTld("example"); setEppInput("domain_update_restore_request.xml", ImmutableMap.of("DOMAIN", "rich.example")); persistPendingDeleteDomain(); EppException thrown = assertThrows(FeesRequiredForPremiumNameException.class, this::runFlow); assertAboutEppExceptions().that(thrown).marshalsToXml(); } @Test void testIcannActivityReportField_getsLogged() throws Exception { persistPendingDeleteDomain(); runFlow(); assertIcannReportingActivityFieldLogged("srs-dom-rgp-restore-request"); assertTldsFieldLogged("tld"); } @Test void testIcannTransactionReportField_getsStored() throws Exception { persistPendingDeleteDomain(); runFlow(); Domain domain = reloadResourceByForeignKey(); DomainHistory historyEntryDomainRestore = (DomainHistory) getOnlyHistoryEntryOfType(domain, HistoryEntry.Type.DOMAIN_RESTORE); assertThat(historyEntryDomainRestore.getDomainTransactionRecords()) .containsExactly( DomainTransactionRecord.create( "tld", historyEntryDomainRestore.getModificationTime(), TransactionReportField.RESTORED_DOMAINS, 1)); } @Test void testFailure_restoreReportsAreNotSupported() { setEppInput("domain_update_restore_report.xml"); // This 
exception is referred to by its fully qualified path (rather than being imported) so // that it is not included in the list of exceptions thrown by DomainRestoreRequestFlow, as this // test EPP won't trigger the request flow at all. EppException thrown = assertThrows( google.registry.flows.EppException.UnimplementedCommandException.class, this::runFlow); assertThat(thrown).hasMessageThat().contains("domain restore reports are not supported"); assertAboutEppExceptions().that(thrown).marshalsToXml(); } }
googleapis/google-cloud-java
35,171
java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/SwitchoverClusterRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1; /** * * * <pre> * Message for switching over to a cluster * </pre> * * Protobuf type {@code google.cloud.alloydb.v1.SwitchoverClusterRequest} */ public final class SwitchoverClusterRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.SwitchoverClusterRequest) SwitchoverClusterRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SwitchoverClusterRequest.newBuilder() to construct. 
private SwitchoverClusterRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SwitchoverClusterRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SwitchoverClusterRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_SwitchoverClusterRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_SwitchoverClusterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.SwitchoverClusterRequest.class, com.google.cloud.alloydb.v1.SwitchoverClusterRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. 
* * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1.SwitchoverClusterRequest)) { return super.equals(obj); } com.google.cloud.alloydb.v1.SwitchoverClusterRequest other = (com.google.cloud.alloydb.v1.SwitchoverClusterRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getValidateOnly() != other.getValidateOnly()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.alloydb.v1.SwitchoverClusterRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for switching over to a cluster * </pre> * * Protobuf type {@code google.cloud.alloydb.v1.SwitchoverClusterRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.SwitchoverClusterRequest) com.google.cloud.alloydb.v1.SwitchoverClusterRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_SwitchoverClusterRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_SwitchoverClusterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.SwitchoverClusterRequest.class, com.google.cloud.alloydb.v1.SwitchoverClusterRequest.Builder.class); } // Construct using com.google.cloud.alloydb.v1.SwitchoverClusterRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_SwitchoverClusterRequest_descriptor; } @java.lang.Override public 
com.google.cloud.alloydb.v1.SwitchoverClusterRequest getDefaultInstanceForType() { return com.google.cloud.alloydb.v1.SwitchoverClusterRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1.SwitchoverClusterRequest build() { com.google.cloud.alloydb.v1.SwitchoverClusterRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1.SwitchoverClusterRequest buildPartial() { com.google.cloud.alloydb.v1.SwitchoverClusterRequest result = new com.google.cloud.alloydb.v1.SwitchoverClusterRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.alloydb.v1.SwitchoverClusterRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override 
public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1.SwitchoverClusterRequest) { return mergeFrom((com.google.cloud.alloydb.v1.SwitchoverClusterRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1.SwitchoverClusterRequest other) { if (other == com.google.cloud.alloydb.v1.SwitchoverClusterRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. 
For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. 
An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). 
* </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. 
This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean validateOnly_; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.SwitchoverClusterRequest) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.SwitchoverClusterRequest) private static final com.google.cloud.alloydb.v1.SwitchoverClusterRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.SwitchoverClusterRequest(); } public static com.google.cloud.alloydb.v1.SwitchoverClusterRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SwitchoverClusterRequest> PARSER = new com.google.protobuf.AbstractParser<SwitchoverClusterRequest>() { @java.lang.Override public SwitchoverClusterRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SwitchoverClusterRequest> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<SwitchoverClusterRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1.SwitchoverClusterRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ignite
34,723
modules/clients/src/test/java/org/apache/ignite/jdbc/thin/JdbcThinStatementSelfTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.jdbc.thin; import java.io.Serializable; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.Statement; import java.util.concurrent.Callable; import org.apache.ignite.IgniteCache; import org.apache.ignite.binary.BinaryInvalidTypeException; import org.apache.ignite.binary.BinaryObjectBuilder; import org.apache.ignite.cache.query.annotations.QuerySqlField; import org.apache.ignite.cache.query.annotations.QuerySqlFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.processors.odbc.SqlStateCode; import org.apache.ignite.internal.util.lang.RunnableX; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.testframework.GridTestUtils; import org.junit.Ignore; import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; /** * Statement test. 
*/ @SuppressWarnings({"ThrowableNotThrown"}) public class JdbcThinStatementSelfTest extends JdbcThinAbstractSelfTest { /** URL. */ private String url = partitionAwareness ? "jdbc:ignite:thin://127.0.0.1:10800..10802?partitionAwareness=true" : "jdbc:ignite:thin://127.0.0.1?partitionAwareness=false"; /** Nodes count. */ private int nodesCnt = partitionAwareness ? 4 : 3; /** SQL query. */ private static final String SQL = "select * from Person where age > 30"; /** Connection. */ private Connection conn; /** Statement. */ private Statement stmt; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); CacheConfiguration<?, ?> cache = defaultCacheConfiguration(); cache.setCacheMode(PARTITIONED); cache.setBackups(1); cache.setWriteSynchronizationMode(FULL_SYNC); cache.setIndexedTypes( String.class, Person.class, Integer.class, Test.class ); cfg.setCacheConfiguration(cache); return cfg; } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { super.beforeTestsStarted(); startGridsMultiThreaded(nodesCnt); fillCache(); } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { conn = DriverManager.getConnection(url); conn.setSchema('"' + DEFAULT_CACHE_NAME + '"'); stmt = conn.createStatement(); assert stmt != null; assert !stmt.isClosed(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { if (stmt != null && !stmt.isClosed()) { stmt.close(); assert stmt.isClosed(); } conn.close(); assert stmt.isClosed(); assert conn.isClosed(); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testExecuteQuery0() throws Exception { ResultSet rs = stmt.executeQuery(SQL); assertNotNull(rs); int cnt = 0; while (rs.next()) { int id = rs.getInt("id"); if (id == 2) { assertEquals("Joe", rs.getString("firstName")); assertEquals("Black", rs.getString("lastName")); assertEquals(35, rs.getInt("age")); } else if (id == 3) { assertEquals("Mike", rs.getString("firstName")); assertEquals("Green", rs.getString("lastName")); assertEquals(40, rs.getInt("age")); } else fail("Wrong ID: " + id); cnt++; } assertEquals(2, cnt); } /** * @throws Exception If failed. */ @org.junit.Test public void testExecuteQuery1() throws Exception { final String sqlText = "select val from test"; try (ResultSet rs = stmt.executeQuery(sqlText)) { assertNotNull(rs); assertTrue(rs.next()); int val = rs.getInt(1); assertTrue("Invalid val: " + val, val >= 1 && val <= 10); } stmt.close(); // Call on a closed statement checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.executeQuery(sqlText); } }); } /** * @throws Exception If failed. */ @org.junit.Test public void testExecute() throws Exception { assertTrue(stmt.execute(SQL)); assertEquals("Update count must be -1 for SELECT query", -1, stmt.getUpdateCount()); ResultSet rs = stmt.getResultSet(); assertNotNull(rs); int cnt = 0; while (rs.next()) { int id = rs.getInt("id"); if (id == 2) { assertEquals("Joe", rs.getString("firstName")); assertEquals("Black", rs.getString("lastName")); assertEquals(35, rs.getInt("age")); } else if (id == 3) { assertEquals( "Mike", rs.getString("firstName")); assertEquals( "Green", rs.getString("lastName")); assertEquals(40, rs.getInt("age")); } else fail("Wrong ID: " + id); cnt++; } assertEquals(2, cnt); assertFalse("Statement has more results.", stmt.getMoreResults()); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testMaxRows() throws Exception { stmt.setMaxRows(1); assertEquals(1, stmt.getMaxRows()); ResultSet rs = stmt.executeQuery(SQL); assertNotNull(rs); int cnt = 0; while (rs.next()) { int id = rs.getInt("id"); if (id == 2) { assertEquals("Joe", rs.getString("firstName")); assertEquals("Black", rs.getString("lastName")); assertEquals(35, rs.getInt("age")); } else if (id == 3) { assertEquals( "Mike", rs.getString("firstName")); assertEquals( "Green", rs.getString("lastName")); assertEquals(40, rs.getInt("age")); } else fail("Wrong ID: " + id); cnt++; } assertEquals(1, cnt); stmt.setMaxRows(0); rs = stmt.executeQuery(SQL); assertNotNull(rs); cnt = 0; while (rs.next()) { int id = rs.getInt("id"); if (id == 2) { assertEquals("Joe", rs.getString("firstName")); assertEquals("Black", rs.getString("lastName")); assertEquals(35, rs.getInt("age")); } else if (id == 3) { assertEquals( "Mike", rs.getString("firstName")); assertEquals( "Green", rs.getString("lastName")); assertEquals(40, rs.getInt("age")); } else fail("Wrong ID: " + id); cnt++; } assertEquals(2, cnt); } /** * @throws Exception If failed. */ @org.junit.Test public void testCloseResultSet0() throws Exception { ResultSet rs0 = stmt.executeQuery(SQL); ResultSet rs1 = stmt.executeQuery(SQL); ResultSet rs2 = stmt.executeQuery(SQL); assertTrue("ResultSet must be implicitly closed after re-execute statement", rs0.isClosed()); assertTrue("ResultSet must be implicitly closed after re-execute statement", rs1.isClosed()); assertFalse("Last result set must be available", rs2.isClosed()); stmt.close(); assertTrue("ResultSet must be explicitly closed after close statement", rs2.isClosed()); } /** * @throws Exception If failed. */ @org.junit.Test public void testCloseResultSet1() throws Exception { stmt.execute(SQL); ResultSet rs = stmt.getResultSet(); stmt.close(); assertTrue("ResultSet must be explicitly closed after close statement", rs.isClosed()); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testCloseResultSetByConnectionClose() throws Exception { ResultSet rs = stmt.executeQuery(SQL); conn.close(); assertTrue("Statement must be implicitly closed after close connection", stmt.isClosed()); assertTrue("ResultSet must be implicitly closed after close connection", rs.isClosed()); } /** * @throws Exception If failed. */ @org.junit.Test public void testCloseOnCompletionAfterQuery() throws Exception { assertFalse("Invalid default closeOnCompletion", stmt.isCloseOnCompletion()); ResultSet rs0 = stmt.executeQuery(SQL); ResultSet rs1 = stmt.executeQuery(SQL); assertTrue("Result set must be closed implicitly", rs0.isClosed()); assertFalse("Statement must not be closed", stmt.isClosed()); rs1.close(); assertFalse("Statement must not be closed", stmt.isClosed()); ResultSet rs2 = stmt.executeQuery(SQL); stmt.closeOnCompletion(); assertTrue("Invalid closeOnCompletion", stmt.isCloseOnCompletion()); rs2.close(); assertTrue("Statement must be closed", stmt.isClosed()); } /** * @throws Exception If failed. */ @org.junit.Test public void testCloseOnCompletionBeforeQuery() throws Exception { assertFalse("Invalid default closeOnCompletion", stmt.isCloseOnCompletion()); ResultSet rs0 = stmt.executeQuery(SQL); ResultSet rs1 = stmt.executeQuery(SQL); assertTrue("Result set must be closed implicitly", rs0.isClosed()); assertFalse("Statement must not be closed", stmt.isClosed()); rs1.close(); assertFalse("Statement must not be closed", stmt.isClosed()); stmt.closeOnCompletion(); ResultSet rs2 = stmt.executeQuery(SQL); assertTrue("Invalid closeOnCompletion", stmt.isCloseOnCompletion()); rs2.close(); assertTrue("Statement must be closed", stmt.isClosed()); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testExecuteQueryMultipleOnlyResultSets() throws Exception { assertTrue(conn.getMetaData().supportsMultipleResultSets()); int stmtCnt = 10; StringBuilder sql = new StringBuilder(); for (int i = 0; i < stmtCnt; ++i) sql.append("select ").append(i).append("; "); assertTrue(stmt.execute(sql.toString())); for (int i = 0; i < stmtCnt - 1; ++i) { ResultSet rs = stmt.getResultSet(); assertTrue(rs.next()); assertEquals(i, rs.getInt(1)); assertFalse(rs.next()); assertTrue(stmt.getMoreResults()); } ResultSet rs = stmt.getResultSet(); assertTrue(rs.next()); assertEquals(stmtCnt - 1, rs.getInt(1)); assertFalse(rs.next()); assertFalse(stmt.getMoreResults()); } /** * @throws Exception If failed. */ @org.junit.Test public void testExecuteQueryMultipleOnlyDml() throws Exception { conn.setSchema(null); Statement stmt0 = conn.createStatement(); int stmtCnt = 10; StringBuilder sql = new StringBuilder("drop table if exists test; create table test(ID int primary key, NAME varchar(20)); "); for (int i = 0; i < stmtCnt; ++i) sql.append("insert into test (ID, NAME) values (" + i + ", 'name_" + i + "'); "); assertFalse(stmt0.execute(sql.toString())); // DROP TABLE statement assertNull(stmt0.getResultSet()); assertEquals(0, stmt0.getUpdateCount()); stmt0.getMoreResults(); // CREATE TABLE statement assertNull(stmt0.getResultSet()); assertEquals(0, stmt0.getUpdateCount()); for (int i = 0; i < stmtCnt; ++i) { assertTrue(stmt0.getMoreResults()); assertNull(stmt0.getResultSet()); assertEquals(1, stmt0.getUpdateCount()); } assertFalse(stmt0.getMoreResults()); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testExecuteQueryMultipleMixed() throws Exception { conn.setSchema(null); Statement stmt0 = conn.createStatement(); int stmtCnt = 10; StringBuilder sql = new StringBuilder("drop table if exists test; create table test(ID int primary key, NAME varchar(20)); "); for (int i = 0; i < stmtCnt; ++i) { if (i % 2 == 0) sql.append(" insert into test (ID, NAME) values (" + i + ", 'name_" + i + "'); "); else sql.append(" select * from test where id < " + i + "; "); } assertFalse(stmt0.execute(sql.toString())); // DROP TABLE statement assertNull(stmt0.getResultSet()); assertEquals(0, stmt0.getUpdateCount()); assertTrue("Result set doesn't have more results.", stmt0.getMoreResults()); // CREATE TABLE statement assertNull(stmt0.getResultSet()); assertEquals(0, stmt0.getUpdateCount()); boolean notEmptyResult = false; for (int i = 0; i < stmtCnt; ++i) { assertTrue(stmt0.getMoreResults()); if (i % 2 == 0) { assertNull(stmt0.getResultSet()); assertEquals(1, stmt0.getUpdateCount()); } else { assertEquals(-1, stmt0.getUpdateCount()); ResultSet rs = stmt0.getResultSet(); int rowsCnt = 0; while (rs.next()) rowsCnt++; assertTrue(rowsCnt <= (i + 1) / 2); if (rowsCnt == (i + 1) / 2) notEmptyResult = true; } } assertTrue(notEmptyResult); assertFalse(stmt0.getMoreResults()); } /** * @throws Exception If failed. */ @org.junit.Test public void testExecuteUpdate() throws Exception { final String sqlText = "update test set val=1 where _key=1"; assertEquals(1, stmt.executeUpdate(sqlText)); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.executeUpdate(sqlText); } }); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testExecuteUpdateProducesResultSet() throws Exception { final String sqlText = "select * from test"; GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { return stmt.executeUpdate(sqlText); } }, SQLException.class, "Given statement type does not match that declared by JDBC driver" ); } /** * @throws Exception If failed. */ @org.junit.Test public void testClose() throws Exception { String sqlText = "select * from test"; ResultSet rs = stmt.executeQuery(sqlText); assertTrue(rs.next()); assertFalse(rs.isClosed()); assertFalse(stmt.isClosed()); stmt.close(); stmt.close(); // Closing closed is ok assertTrue(stmt.isClosed()); // Current result set must be closed assertTrue(rs.isClosed()); } /** * @throws Exception If failed. */ @org.junit.Test public void testGetSetMaxFieldSizeUnsupported() throws Exception { assertEquals(0, stmt.getMaxFieldSize()); GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.setMaxFieldSize(100); return null; } }, SQLFeatureNotSupportedException.class, "Field size limitation is not supported" ); assertEquals(0, stmt.getMaxFieldSize()); stmt.close(); // Call on a closed statement checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getMaxFieldSize(); } }); // Call on a closed statement checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.setMaxFieldSize(100); } }); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testGetSetMaxRows() throws Exception { assertEquals(0, stmt.getMaxRows()); GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.setMaxRows(-1); return null; } }, SQLException.class, "Invalid max rows value" ); assertEquals(0, stmt.getMaxRows()); final int maxRows = 1; stmt.setMaxRows(maxRows); assertEquals(maxRows, stmt.getMaxRows()); String sqlText = "select * from test"; ResultSet rs = stmt.executeQuery(sqlText); assertTrue(rs.next()); assertFalse(rs.next()); //max rows reached stmt.close(); // Call on a closed statement checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getMaxRows(); } }); // Call on a closed statement checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.setMaxRows(maxRows); } }); } /** * @throws Exception If failed. */ @org.junit.Test @Ignore("https://issues.apache.org/jira/browse/IGNITE-5440") public void testSetEscapeProcessing() throws Exception { stmt.setEscapeProcessing(false); final String sqlText = "select {fn CONVERT(1, SQL_BOOLEAN)}"; GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { return stmt.executeQuery(sqlText); } }, SQLException.class, "Failed to parse" ); ResultSet rs = stmt.executeQuery(sqlText); assertTrue(rs.next()); assertEquals(true, rs.getBoolean(1)); stmt.setEscapeProcessing(true); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.setEscapeProcessing(true); } }); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testGetSetQueryTimeout() throws Exception { assertEquals(0, stmt.getQueryTimeout()); // Invalid argument GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.setQueryTimeout(-1); return null; } }, SQLException.class, "Invalid timeout value" ); assertEquals(0, stmt.getQueryTimeout()); final int timeout = 3; stmt.setQueryTimeout(timeout); assertEquals(timeout, stmt.getQueryTimeout()); stmt.close(); // Call on a closed statement checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getQueryTimeout(); } }); // Call on a closed statement checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.setQueryTimeout(timeout); } }); } /** * @throws Exception If failed. */ @org.junit.Test public void testMaxFieldSize() throws Exception { assertTrue(stmt.getMaxFieldSize() >= 0); GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.setMaxFieldSize(-1); return null; } }, SQLException.class, "Invalid field limit" ); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.setMaxFieldSize(100); } }); } /** * @throws Exception If failed. */ @org.junit.Test public void testQueryTimeout() throws Exception { assertEquals("Default timeout invalid: " + stmt.getQueryTimeout(), 0, stmt.getQueryTimeout()); stmt.setQueryTimeout(10); assertEquals(10, stmt.getQueryTimeout()); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getQueryTimeout(); } }); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.setQueryTimeout(10); } }); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testWarningsOnClosedStatement() throws Exception { stmt.clearWarnings(); assertNull(null, stmt.getWarnings()); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getWarnings(); } }); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.clearWarnings(); } }); } /** * @throws Exception If failed. */ @org.junit.Test public void testCursorName() throws Exception { checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.setCursorName("test"); } }); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.setCursorName("test"); } }); } /** * @throws Exception If failed. */ @org.junit.Test public void testGetMoreResults() throws Exception { assertFalse(stmt.getMoreResults()); stmt.execute("select 1; "); ResultSet rs = stmt.getResultSet(); assertFalse(stmt.getMoreResults()); assertNull(stmt.getResultSet()); assertTrue(rs.isClosed()); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getMoreResults(); } }); } /** * @throws Exception If failed. */ @org.junit.Test public void testGetMoreResultsKeepCurrent() throws Exception { assertFalse(stmt.getMoreResults(Statement.CLOSE_CURRENT_RESULT)); assertFalse(stmt.getMoreResults(Statement.KEEP_CURRENT_RESULT)); assertFalse(stmt.getMoreResults(Statement.CLOSE_ALL_RESULTS)); stmt.execute("select 1; "); ResultSet rs = stmt.getResultSet(); assertFalse(stmt.getMoreResults(Statement.KEEP_CURRENT_RESULT)); assertFalse(rs.isClosed()); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getMoreResults(Statement.KEEP_CURRENT_RESULT); } }); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testGetMoreResultsCloseAll() throws Exception { assertFalse(stmt.getMoreResults(Statement.CLOSE_CURRENT_RESULT)); assertFalse(stmt.getMoreResults(Statement.KEEP_CURRENT_RESULT)); assertFalse(stmt.getMoreResults(Statement.CLOSE_ALL_RESULTS)); stmt.execute("select 1; "); ResultSet rs = stmt.getResultSet(); assertFalse(stmt.getMoreResults(Statement.CLOSE_ALL_RESULTS)); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getMoreResults(Statement.KEEP_CURRENT_RESULT); } }); } /** * Verifies that emty batch can be performed. * * @throws Exception If failed. */ @org.junit.Test public void testBatchEmpty() throws Exception { assertTrue(conn.getMetaData().supportsBatchUpdates()); stmt.addBatch(""); stmt.clearBatch(); // Just verify that no exception have been thrown. stmt.executeBatch(); } /** * @throws Exception If failed. */ @org.junit.Test public void testFetchDirection() throws Exception { assertEquals(ResultSet.FETCH_FORWARD, stmt.getFetchDirection()); GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.setFetchDirection(ResultSet.FETCH_REVERSE); return null; } }, SQLFeatureNotSupportedException.class, "Only forward direction is supported." ); stmt.close(); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.setFetchDirection(-1); } }); checkStatementClosed(new RunnableX() { @Override public void runx() throws Exception { stmt.getFetchDirection(); } }); } /** * @throws Exception If failed. 
*/ @org.junit.Test public void testAutogenerated() throws Exception { GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.executeUpdate("select 1", -1); return null; } }, SQLException.class, "Invalid autoGeneratedKeys value"); GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.execute("select 1", -1); return null; } }, SQLException.class, "Invalid autoGeneratedKeys value"); assertFalse(conn.getMetaData().supportsGetGeneratedKeys()); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.getGeneratedKeys(); } }); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.executeUpdate("select 1", Statement.RETURN_GENERATED_KEYS); } }); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.executeUpdate("select 1", new int[] {1, 2}); } }); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.executeUpdate("select 1", new String[] {"a", "b"}); } }); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.execute("select 1", Statement.RETURN_GENERATED_KEYS); } }); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.execute("select 1", new int[] {1, 2}); } }); checkNotSupported(new RunnableX() { @Override public void runx() throws Exception { stmt.execute("select 1", new String[] {"a", "b"}); } }); } /** * @throws Exception If failed. */ @org.junit.Test public void testStatementTypeMismatchSelectForCachedQuery() throws Exception { // Put query to cache. 
stmt.executeQuery("select 1;"); GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.executeUpdate("select 1;"); return null; } }, SQLException.class, "Given statement type does not match that declared by JDBC driver"); assertNull("Not results expected. Last statement is executed with exception", stmt.getResultSet()); } /** * @throws Exception If failed. */ @org.junit.Test public void testStatementTypeMismatchUpdate() throws Exception { GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { stmt.executeQuery("update test set val=28 where _key=1"); return null; } }, SQLException.class, "Given statement type does not match that declared by JDBC driver"); ResultSet rs = stmt.executeQuery("select val from test where _key=1"); boolean next = rs.next(); assertTrue(next); assertEquals("The data must not be updated. " + "Because update statement is executed via 'executeQuery' method." + " Data [val=" + rs.getInt(1) + ']', 1, rs.getInt(1)); } /** * */ @org.junit.Test public void testExceptionOnDeserializeResponse() throws SQLException { try (Connection c = connect(grid(0), null)) { execute(c, "CREATE TABLE TEST_DESERIALIZE(id int primary key, name varchar, BINFIELD OTHER) WITH " + "\"cache_name=TEST_DESERIALIZE,VALUE_TYPE=TEST_TYPE\""); IgniteCache<Object, Object> cc = grid(0).cache("TEST_DESERIALIZE"); BinaryObjectBuilder bobFld = grid(0).binary().builder("TestType"); bobFld.setField("fld0", 0); BinaryObjectBuilder bob = grid(0).binary().builder("TEST_TYPE"); bob.setField("NAME", "name0"); bob.setField("BINFIELD", bobFld.build()); cc.put(0, bob.build()); try (Statement stmt = c.createStatement()) { SQLException ex = (SQLException)GridTestUtils.assertThrows( log, () -> stmt.executeQuery("SELECT * FROM TEST_DESERIALIZE"), SQLException.class, "Serialization error during sending an sql request" ); assertEquals(SqlStateCode.DATA_EXCEPTION, ex.getSQLState()); 
assertTrue(X.hasCause(ex, "TestType", BinaryInvalidTypeException.class)); ResultSet rs = stmt.executeQuery("SELECT id FROM TEST_DESERIALIZE"); rs.next(); assertEquals(0, rs.getInt(1)); } } } /** */ private void fillCache() { IgniteCache<String, Person> cachePerson = grid(0).cache(DEFAULT_CACHE_NAME); assertNotNull(cachePerson); cachePerson.put("p1", new Person(1, "John", "White", 25)); cachePerson.put("p2", new Person(2, "Joe", "Black", 35)); cachePerson.put("p3", new Person(3, "Mike", "Green", 40)); IgniteCache<Integer, Test> cacheTest = grid(0).cache(DEFAULT_CACHE_NAME); for (int i = 1; i <= 10; i++) cacheTest.put(i, new Test(i)); } /** */ @SuppressWarnings("unused") public static class Test { /** */ @QuerySqlField private int val; /** * @param val Value. */ public Test(int val) { this.val = val; } } /** * * @param v seconds to sleep * @return passed value */ @SuppressWarnings("unused") @QuerySqlFunction public static int sleep_func(int v) { try { Thread.sleep(v * 1000); } catch (InterruptedException ignored) { // No-op } return v; } /** * Person. */ private static class Person implements Serializable { /** ID. */ @QuerySqlField private final int id; /** First name. */ @QuerySqlField private final String firstName; /** Last name. */ @QuerySqlField private final String lastName; /** Age. */ @QuerySqlField private final int age; /** * @param id ID. * @param firstName First name. * @param lastName Last name. * @param age Age. */ private Person(int id, String firstName, String lastName, int age) { assert !F.isEmpty(firstName); assert !F.isEmpty(lastName); assert age > 0; this.id = id; this.firstName = firstName; this.lastName = lastName; this.age = age; } } }
googleapis/google-cloud-java
35,042
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetIamPolicyDiskRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * A request message for Disks.GetIamPolicy. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.GetIamPolicyDiskRequest} */ public final class GetIamPolicyDiskRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyDiskRequest) GetIamPolicyDiskRequestOrBuilder { private static final long serialVersionUID = 0L; // Use GetIamPolicyDiskRequest.newBuilder() to construct. 
private GetIamPolicyDiskRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GetIamPolicyDiskRequest() { project_ = ""; resource_ = ""; zone_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GetIamPolicyDiskRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyDiskRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyDiskRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.GetIamPolicyDiskRequest.class, com.google.cloud.compute.v1.GetIamPolicyDiskRequest.Builder.class); } private int bitField0_; public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029; private int optionsRequestedPolicyVersion_ = 0; /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return Whether the optionsRequestedPolicyVersion field is set. */ @java.lang.Override public boolean hasOptionsRequestedPolicyVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return The optionsRequestedPolicyVersion. */ @java.lang.Override public int getOptionsRequestedPolicyVersion() { return optionsRequestedPolicyVersion_; } public static final int PROJECT_FIELD_NUMBER = 227560217; @SuppressWarnings("serial") private volatile java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. 
* </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. */ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_FIELD_NUMBER = 195806222; @SuppressWarnings("serial") private volatile java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. */ @java.lang.Override public java.lang.String getResource() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. 
*/ @java.lang.Override public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ZONE_FIELD_NUMBER = 3744684; @SuppressWarnings("serial") private volatile java.lang.Object zone_ = ""; /** * * * <pre> * The name of the zone for this request. * </pre> * * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The zone. */ @java.lang.Override public java.lang.String getZone() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } } /** * * * <pre> * The name of the zone for this request. * </pre> * * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for zone. 
*/ @java.lang.Override public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3744684, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(499220029, optionsRequestedPolicyVersion_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3744684, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeInt32Size( 
499220029, optionsRequestedPolicyVersion_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyDiskRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.GetIamPolicyDiskRequest other = (com.google.cloud.compute.v1.GetIamPolicyDiskRequest) obj; if (hasOptionsRequestedPolicyVersion() != other.hasOptionsRequestedPolicyVersion()) return false; if (hasOptionsRequestedPolicyVersion()) { if (getOptionsRequestedPolicyVersion() != other.getOptionsRequestedPolicyVersion()) return false; } if (!getProject().equals(other.getProject())) return false; if (!getResource().equals(other.getResource())) return false; if (!getZone().equals(other.getZone())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasOptionsRequestedPolicyVersion()) { hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER; hash = (53 * hash) + getOptionsRequestedPolicyVersion(); } hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); hash = (37 * hash) + ZONE_FIELD_NUMBER; hash = (53 * hash) + getZone().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.compute.v1.GetIamPolicyDiskRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for Disks.GetIamPolicy. See the method description for details. 
* </pre> * * Protobuf type {@code google.cloud.compute.v1.GetIamPolicyDiskRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyDiskRequest) com.google.cloud.compute.v1.GetIamPolicyDiskRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyDiskRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyDiskRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.GetIamPolicyDiskRequest.class, com.google.cloud.compute.v1.GetIamPolicyDiskRequest.Builder.class); } // Construct using com.google.cloud.compute.v1.GetIamPolicyDiskRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; optionsRequestedPolicyVersion_ = 0; project_ = ""; resource_ = ""; zone_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyDiskRequest_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyDiskRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.GetIamPolicyDiskRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyDiskRequest build() { com.google.cloud.compute.v1.GetIamPolicyDiskRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } 
return result; } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyDiskRequest buildPartial() { com.google.cloud.compute.v1.GetIamPolicyDiskRequest result = new com.google.cloud.compute.v1.GetIamPolicyDiskRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.compute.v1.GetIamPolicyDiskRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.project_ = project_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.resource_ = resource_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.zone_ = zone_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.GetIamPolicyDiskRequest) { return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyDiskRequest) other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.GetIamPolicyDiskRequest other) { if (other == com.google.cloud.compute.v1.GetIamPolicyDiskRequest.getDefaultInstance()) return this; if (other.hasOptionsRequestedPolicyVersion()) { setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion()); } if (!other.getProject().isEmpty()) { project_ = other.project_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getResource().isEmpty()) { resource_ = other.resource_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getZone().isEmpty()) { zone_ = other.zone_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 29957474: { zone_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 29957474 case 1566449778: { resource_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 1566449778 case 1820481738: { project_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 1820481738 case -301207064: { optionsRequestedPolicyVersion_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case -301207064 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; 
private int optionsRequestedPolicyVersion_; /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return Whether the optionsRequestedPolicyVersion field is set. */ @java.lang.Override public boolean hasOptionsRequestedPolicyVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return The optionsRequestedPolicyVersion. */ @java.lang.Override public int getOptionsRequestedPolicyVersion() { return optionsRequestedPolicyVersion_; } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @param value The optionsRequestedPolicyVersion to set. * @return This builder for chaining. */ public Builder setOptionsRequestedPolicyVersion(int value) { optionsRequestedPolicyVersion_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return This builder for chaining. */ public Builder clearOptionsRequestedPolicyVersion() { bitField0_ = (bitField0_ & ~0x00000001); optionsRequestedPolicyVersion_ = 0; onChanged(); return this; } private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. 
* </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. */ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The project to set. * @return This builder for chaining. */ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for project to set. * @return This builder for chaining. */ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. 
*/ public java.lang.String getResource() { java.lang.Object ref = resource_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. */ public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The resource to set. * @return This builder for chaining. */ public Builder setResource(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resource_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearResource() { resource_ = getDefaultInstance().getResource(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for resource to set. * @return This builder for chaining. 
*/ public Builder setResourceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resource_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object zone_ = ""; /** * * * <pre> * The name of the zone for this request. * </pre> * * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The zone. */ public java.lang.String getZone() { java.lang.Object ref = zone_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the zone for this request. * </pre> * * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for zone. */ public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the zone for this request. * </pre> * * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The zone to set. * @return This builder for chaining. */ public Builder setZone(java.lang.String value) { if (value == null) { throw new NullPointerException(); } zone_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The name of the zone for this request. * </pre> * * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearZone() { zone_ = getDefaultInstance().getZone(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * The name of the zone for this request. 
* </pre> * * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for zone to set. * @return This builder for chaining. */ public Builder setZoneBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); zone_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyDiskRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyDiskRequest) private static final com.google.cloud.compute.v1.GetIamPolicyDiskRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyDiskRequest(); } public static com.google.cloud.compute.v1.GetIamPolicyDiskRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GetIamPolicyDiskRequest> PARSER = new com.google.protobuf.AbstractParser<GetIamPolicyDiskRequest>() { @java.lang.Override public GetIamPolicyDiskRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GetIamPolicyDiskRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GetIamPolicyDiskRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyDiskRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,068
java-dataflow/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/WorkerDetails.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/dataflow/v1beta3/metrics.proto // Protobuf Java Version: 3.25.8 package com.google.dataflow.v1beta3; /** * * * <pre> * Information about a worker * </pre> * * Protobuf type {@code google.dataflow.v1beta3.WorkerDetails} */ public final class WorkerDetails extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.WorkerDetails) WorkerDetailsOrBuilder { private static final long serialVersionUID = 0L; // Use WorkerDetails.newBuilder() to construct. 
private WorkerDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private WorkerDetails() { workerName_ = ""; workItems_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new WorkerDetails(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_WorkerDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_WorkerDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.dataflow.v1beta3.WorkerDetails.class, com.google.dataflow.v1beta3.WorkerDetails.Builder.class); } public static final int WORKER_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object workerName_ = ""; /** * * * <pre> * Name of this worker * </pre> * * <code>string worker_name = 1;</code> * * @return The workerName. */ @java.lang.Override public java.lang.String getWorkerName() { java.lang.Object ref = workerName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); workerName_ = s; return s; } } /** * * * <pre> * Name of this worker * </pre> * * <code>string worker_name = 1;</code> * * @return The bytes for workerName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getWorkerNameBytes() { java.lang.Object ref = workerName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); workerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int WORK_ITEMS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.dataflow.v1beta3.WorkItemDetails> workItems_; /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ @java.lang.Override public java.util.List<com.google.dataflow.v1beta3.WorkItemDetails> getWorkItemsList() { return workItems_; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.dataflow.v1beta3.WorkItemDetailsOrBuilder> getWorkItemsOrBuilderList() { return workItems_; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ @java.lang.Override public int getWorkItemsCount() { return workItems_.size(); } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ @java.lang.Override public com.google.dataflow.v1beta3.WorkItemDetails getWorkItems(int index) { return workItems_.get(index); } /** * * * <pre> * Work items processed by this worker, sorted by time. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ @java.lang.Override public com.google.dataflow.v1beta3.WorkItemDetailsOrBuilder getWorkItemsOrBuilder(int index) { return workItems_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workerName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, workerName_); } for (int i = 0; i < workItems_.size(); i++) { output.writeMessage(2, workItems_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workerName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, workerName_); } for (int i = 0; i < workItems_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, workItems_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.dataflow.v1beta3.WorkerDetails)) { return super.equals(obj); } com.google.dataflow.v1beta3.WorkerDetails other = (com.google.dataflow.v1beta3.WorkerDetails) obj; if (!getWorkerName().equals(other.getWorkerName())) return false; if (!getWorkItemsList().equals(other.getWorkItemsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + WORKER_NAME_FIELD_NUMBER; hash = (53 * hash) + getWorkerName().hashCode(); if (getWorkItemsCount() > 0) { hash = (37 * hash) + WORK_ITEMS_FIELD_NUMBER; hash = (53 * hash) + getWorkItemsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.dataflow.v1beta3.WorkerDetails parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.dataflow.v1beta3.WorkerDetails parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.WorkerDetails parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.WorkerDetails parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.dataflow.v1beta3.WorkerDetails prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Information about a worker * </pre> * * Protobuf type {@code google.dataflow.v1beta3.WorkerDetails} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.WorkerDetails) com.google.dataflow.v1beta3.WorkerDetailsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_WorkerDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_WorkerDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.dataflow.v1beta3.WorkerDetails.class, com.google.dataflow.v1beta3.WorkerDetails.Builder.class); } // Construct using com.google.dataflow.v1beta3.WorkerDetails.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; workerName_ = ""; if (workItemsBuilder_ == null) { workItems_ = java.util.Collections.emptyList(); } else { workItems_ = null; workItemsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_WorkerDetails_descriptor; } @java.lang.Override public com.google.dataflow.v1beta3.WorkerDetails getDefaultInstanceForType() { return 
com.google.dataflow.v1beta3.WorkerDetails.getDefaultInstance(); } @java.lang.Override public com.google.dataflow.v1beta3.WorkerDetails build() { com.google.dataflow.v1beta3.WorkerDetails result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.dataflow.v1beta3.WorkerDetails buildPartial() { com.google.dataflow.v1beta3.WorkerDetails result = new com.google.dataflow.v1beta3.WorkerDetails(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.dataflow.v1beta3.WorkerDetails result) { if (workItemsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { workItems_ = java.util.Collections.unmodifiableList(workItems_); bitField0_ = (bitField0_ & ~0x00000002); } result.workItems_ = workItems_; } else { result.workItems_ = workItemsBuilder_.build(); } } private void buildPartial0(com.google.dataflow.v1beta3.WorkerDetails result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.workerName_ = workerName_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, 
java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.dataflow.v1beta3.WorkerDetails) { return mergeFrom((com.google.dataflow.v1beta3.WorkerDetails) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.dataflow.v1beta3.WorkerDetails other) { if (other == com.google.dataflow.v1beta3.WorkerDetails.getDefaultInstance()) return this; if (!other.getWorkerName().isEmpty()) { workerName_ = other.workerName_; bitField0_ |= 0x00000001; onChanged(); } if (workItemsBuilder_ == null) { if (!other.workItems_.isEmpty()) { if (workItems_.isEmpty()) { workItems_ = other.workItems_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureWorkItemsIsMutable(); workItems_.addAll(other.workItems_); } onChanged(); } } else { if (!other.workItems_.isEmpty()) { if (workItemsBuilder_.isEmpty()) { workItemsBuilder_.dispose(); workItemsBuilder_ = null; workItems_ = other.workItems_; bitField0_ = (bitField0_ & ~0x00000002); workItemsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getWorkItemsFieldBuilder() : null; } else { workItemsBuilder_.addAllMessages(other.workItems_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { workerName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.dataflow.v1beta3.WorkItemDetails m = input.readMessage( com.google.dataflow.v1beta3.WorkItemDetails.parser(), extensionRegistry); if (workItemsBuilder_ == null) { ensureWorkItemsIsMutable(); workItems_.add(m); } else { workItemsBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object workerName_ = ""; /** * * * <pre> * Name of this worker * </pre> * * <code>string worker_name = 1;</code> * * @return The workerName. */ public java.lang.String getWorkerName() { java.lang.Object ref = workerName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); workerName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of this worker * </pre> * * <code>string worker_name = 1;</code> * * @return The bytes for workerName. 
*/ public com.google.protobuf.ByteString getWorkerNameBytes() { java.lang.Object ref = workerName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); workerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of this worker * </pre> * * <code>string worker_name = 1;</code> * * @param value The workerName to set. * @return This builder for chaining. */ public Builder setWorkerName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } workerName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Name of this worker * </pre> * * <code>string worker_name = 1;</code> * * @return This builder for chaining. */ public Builder clearWorkerName() { workerName_ = getDefaultInstance().getWorkerName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Name of this worker * </pre> * * <code>string worker_name = 1;</code> * * @param value The bytes for workerName to set. * @return This builder for chaining. */ public Builder setWorkerNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); workerName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.dataflow.v1beta3.WorkItemDetails> workItems_ = java.util.Collections.emptyList(); private void ensureWorkItemsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { workItems_ = new java.util.ArrayList<com.google.dataflow.v1beta3.WorkItemDetails>(workItems_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.dataflow.v1beta3.WorkItemDetails, com.google.dataflow.v1beta3.WorkItemDetails.Builder, com.google.dataflow.v1beta3.WorkItemDetailsOrBuilder> workItemsBuilder_; /** * * * <pre> * Work items processed by this worker, sorted by time. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public java.util.List<com.google.dataflow.v1beta3.WorkItemDetails> getWorkItemsList() { if (workItemsBuilder_ == null) { return java.util.Collections.unmodifiableList(workItems_); } else { return workItemsBuilder_.getMessageList(); } } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public int getWorkItemsCount() { if (workItemsBuilder_ == null) { return workItems_.size(); } else { return workItemsBuilder_.getCount(); } } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public com.google.dataflow.v1beta3.WorkItemDetails getWorkItems(int index) { if (workItemsBuilder_ == null) { return workItems_.get(index); } else { return workItemsBuilder_.getMessage(index); } } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder setWorkItems(int index, com.google.dataflow.v1beta3.WorkItemDetails value) { if (workItemsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkItemsIsMutable(); workItems_.set(index, value); onChanged(); } else { workItemsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder setWorkItems( int index, com.google.dataflow.v1beta3.WorkItemDetails.Builder builderForValue) { if (workItemsBuilder_ == null) { ensureWorkItemsIsMutable(); workItems_.set(index, builderForValue.build()); onChanged(); } else { workItemsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder addWorkItems(com.google.dataflow.v1beta3.WorkItemDetails value) { if (workItemsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkItemsIsMutable(); workItems_.add(value); onChanged(); } else { workItemsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder addWorkItems(int index, com.google.dataflow.v1beta3.WorkItemDetails value) { if (workItemsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkItemsIsMutable(); workItems_.add(index, value); onChanged(); } else { workItemsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder addWorkItems( com.google.dataflow.v1beta3.WorkItemDetails.Builder builderForValue) { if (workItemsBuilder_ == null) { ensureWorkItemsIsMutable(); workItems_.add(builderForValue.build()); onChanged(); } else { workItemsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder addWorkItems( int index, com.google.dataflow.v1beta3.WorkItemDetails.Builder builderForValue) { if (workItemsBuilder_ == null) { ensureWorkItemsIsMutable(); workItems_.add(index, builderForValue.build()); onChanged(); } else { workItemsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder addAllWorkItems( java.lang.Iterable<? extends com.google.dataflow.v1beta3.WorkItemDetails> values) { if (workItemsBuilder_ == null) { ensureWorkItemsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, workItems_); onChanged(); } else { workItemsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder clearWorkItems() { if (workItemsBuilder_ == null) { workItems_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { workItemsBuilder_.clear(); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public Builder removeWorkItems(int index) { if (workItemsBuilder_ == null) { ensureWorkItemsIsMutable(); workItems_.remove(index); onChanged(); } else { workItemsBuilder_.remove(index); } return this; } /** * * * <pre> * Work items processed by this worker, sorted by time. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public com.google.dataflow.v1beta3.WorkItemDetails.Builder getWorkItemsBuilder(int index) { return getWorkItemsFieldBuilder().getBuilder(index); } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public com.google.dataflow.v1beta3.WorkItemDetailsOrBuilder getWorkItemsOrBuilder(int index) { if (workItemsBuilder_ == null) { return workItems_.get(index); } else { return workItemsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public java.util.List<? extends com.google.dataflow.v1beta3.WorkItemDetailsOrBuilder> getWorkItemsOrBuilderList() { if (workItemsBuilder_ != null) { return workItemsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(workItems_); } } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public com.google.dataflow.v1beta3.WorkItemDetails.Builder addWorkItemsBuilder() { return getWorkItemsFieldBuilder() .addBuilder(com.google.dataflow.v1beta3.WorkItemDetails.getDefaultInstance()); } /** * * * <pre> * Work items processed by this worker, sorted by time. * </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public com.google.dataflow.v1beta3.WorkItemDetails.Builder addWorkItemsBuilder(int index) { return getWorkItemsFieldBuilder() .addBuilder(index, com.google.dataflow.v1beta3.WorkItemDetails.getDefaultInstance()); } /** * * * <pre> * Work items processed by this worker, sorted by time. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.WorkItemDetails work_items = 2;</code> */ public java.util.List<com.google.dataflow.v1beta3.WorkItemDetails.Builder> getWorkItemsBuilderList() { return getWorkItemsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.dataflow.v1beta3.WorkItemDetails, com.google.dataflow.v1beta3.WorkItemDetails.Builder, com.google.dataflow.v1beta3.WorkItemDetailsOrBuilder> getWorkItemsFieldBuilder() { if (workItemsBuilder_ == null) { workItemsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.dataflow.v1beta3.WorkItemDetails, com.google.dataflow.v1beta3.WorkItemDetails.Builder, com.google.dataflow.v1beta3.WorkItemDetailsOrBuilder>( workItems_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); workItems_ = null; } return workItemsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.WorkerDetails) } // @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.WorkerDetails) private static final com.google.dataflow.v1beta3.WorkerDetails DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.WorkerDetails(); } public static com.google.dataflow.v1beta3.WorkerDetails getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<WorkerDetails> PARSER = new com.google.protobuf.AbstractParser<WorkerDetails>() { @java.lang.Override public WorkerDetails parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder 
builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<WorkerDetails> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<WorkerDetails> getParserForType() { return PARSER; } @java.lang.Override public com.google.dataflow.v1beta3.WorkerDetails getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,363
java-dataplex/google-cloud-dataplex/src/main/java/com/google/cloud/dataplex/v1/stub/HttpJsonCmekServiceStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dataplex.v1.stub; import static com.google.cloud.dataplex.v1.CmekServiceClient.ListEncryptionConfigsPagedResponse; import static com.google.cloud.dataplex.v1.CmekServiceClient.ListLocationsPagedResponse; import com.google.api.HttpRule; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonOperationSnapshot; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest; import com.google.cloud.dataplex.v1.DeleteEncryptionConfigRequest; import com.google.cloud.dataplex.v1.EncryptionConfig; import com.google.cloud.dataplex.v1.GetEncryptionConfigRequest; import com.google.cloud.dataplex.v1.ListEncryptionConfigsRequest; 
import com.google.cloud.dataplex.v1.ListEncryptionConfigsResponse; import com.google.cloud.dataplex.v1.OperationMetadata; import com.google.cloud.dataplex.v1.UpdateEncryptionConfigRequest; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.ImmutableMap; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import com.google.protobuf.TypeRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * REST stub implementation for the CmekService service API. * * <p>This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator-java") public class HttpJsonCmekServiceStub extends CmekServiceStub { private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder() .add(Empty.getDescriptor()) .add(EncryptionConfig.getDescriptor()) .add(OperationMetadata.getDescriptor()) .build(); private static final ApiMethodDescriptor<CreateEncryptionConfigRequest, Operation> createEncryptionConfigMethodDescriptor = ApiMethodDescriptor.<CreateEncryptionConfigRequest, Operation>newBuilder() .setFullMethodName("google.cloud.dataplex.v1.CmekService/CreateEncryptionConfig") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<CreateEncryptionConfigRequest>newBuilder() .setPath( "/v1/{parent=organizations/*/locations/*}/encryptionConfigs", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<CreateEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "parent", request.getParent()); return fields; }) 
.setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<CreateEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam( fields, "encryptionConfigId", request.getEncryptionConfigId()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("encryptionConfig", request.getEncryptionConfig(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (CreateEncryptionConfigRequest request, Operation response) -> HttpJsonOperationSnapshot.create(response)) .build(); private static final ApiMethodDescriptor<UpdateEncryptionConfigRequest, Operation> updateEncryptionConfigMethodDescriptor = ApiMethodDescriptor.<UpdateEncryptionConfigRequest, Operation>newBuilder() .setFullMethodName("google.cloud.dataplex.v1.CmekService/UpdateEncryptionConfig") .setHttpMethod("PATCH") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<UpdateEncryptionConfigRequest>newBuilder() .setPath( "/v1/{encryptionConfig.name=organizations/*/locations/*/encryptionConfigs/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<UpdateEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam( fields, "encryptionConfig.name", request.getEncryptionConfig().getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<UpdateEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "updateMask", request.getUpdateMask()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( 
request -> ProtoRestSerializer.create() .toBody("encryptionConfig", request.getEncryptionConfig(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (UpdateEncryptionConfigRequest request, Operation response) -> HttpJsonOperationSnapshot.create(response)) .build(); private static final ApiMethodDescriptor<DeleteEncryptionConfigRequest, Operation> deleteEncryptionConfigMethodDescriptor = ApiMethodDescriptor.<DeleteEncryptionConfigRequest, Operation>newBuilder() .setFullMethodName("google.cloud.dataplex.v1.CmekService/DeleteEncryptionConfig") .setHttpMethod("DELETE") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<DeleteEncryptionConfigRequest>newBuilder() .setPath( "/v1/{name=organizations/*/locations/*/encryptionConfigs/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<DeleteEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<DeleteEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "etag", request.getEtag()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (DeleteEncryptionConfigRequest request, Operation response) -> HttpJsonOperationSnapshot.create(response)) .build(); private static final ApiMethodDescriptor< ListEncryptionConfigsRequest, ListEncryptionConfigsResponse> 
listEncryptionConfigsMethodDescriptor = ApiMethodDescriptor .<ListEncryptionConfigsRequest, ListEncryptionConfigsResponse>newBuilder() .setFullMethodName("google.cloud.dataplex.v1.CmekService/ListEncryptionConfigs") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListEncryptionConfigsRequest>newBuilder() .setPath( "/v1/{parent=organizations/*/locations/*}/encryptionConfigs", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListEncryptionConfigsRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "parent", request.getParent()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListEncryptionConfigsRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "filter", request.getFilter()); serializer.putQueryParam(fields, "orderBy", request.getOrderBy()); serializer.putQueryParam(fields, "pageSize", request.getPageSize()); serializer.putQueryParam(fields, "pageToken", request.getPageToken()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<ListEncryptionConfigsResponse>newBuilder() .setDefaultInstance(ListEncryptionConfigsResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<GetEncryptionConfigRequest, EncryptionConfig> getEncryptionConfigMethodDescriptor = ApiMethodDescriptor.<GetEncryptionConfigRequest, EncryptionConfig>newBuilder() .setFullMethodName("google.cloud.dataplex.v1.CmekService/GetEncryptionConfig") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetEncryptionConfigRequest>newBuilder() .setPath( 
"/v1/{name=organizations/*/locations/*/encryptionConfigs/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetEncryptionConfigRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<EncryptionConfig>newBuilder() .setDefaultInstance(EncryptionConfig.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<ListLocationsRequest, ListLocationsResponse> listLocationsMethodDescriptor = ApiMethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setFullMethodName("google.cloud.location.Locations/ListLocations") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListLocationsRequest>newBuilder() .setPath( "/v1/{name=projects/*}/locations", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListLocationsRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListLocationsRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<ListLocationsResponse>newBuilder() .setDefaultInstance(ListLocationsResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) 
.build(); private static final ApiMethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor = ApiMethodDescriptor.<GetLocationRequest, Location>newBuilder() .setFullMethodName("google.cloud.location.Locations/GetLocation") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetLocationRequest>newBuilder() .setPath( "/v1/{name=projects/*/locations/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetLocationRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetLocationRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Location>newBuilder() .setDefaultInstance(Location.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private final UnaryCallable<CreateEncryptionConfigRequest, Operation> createEncryptionConfigCallable; private final OperationCallable< CreateEncryptionConfigRequest, EncryptionConfig, OperationMetadata> createEncryptionConfigOperationCallable; private final UnaryCallable<UpdateEncryptionConfigRequest, Operation> updateEncryptionConfigCallable; private final OperationCallable< UpdateEncryptionConfigRequest, EncryptionConfig, OperationMetadata> updateEncryptionConfigOperationCallable; private final UnaryCallable<DeleteEncryptionConfigRequest, Operation> deleteEncryptionConfigCallable; private final OperationCallable<DeleteEncryptionConfigRequest, Empty, OperationMetadata> deleteEncryptionConfigOperationCallable; private final UnaryCallable<ListEncryptionConfigsRequest, ListEncryptionConfigsResponse> listEncryptionConfigsCallable; private 
final UnaryCallable<ListEncryptionConfigsRequest, ListEncryptionConfigsPagedResponse> listEncryptionConfigsPagedCallable; private final UnaryCallable<GetEncryptionConfigRequest, EncryptionConfig> getEncryptionConfigCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable; private final UnaryCallable<GetLocationRequest, Location> getLocationCallable; private final BackgroundResource backgroundResources; private final HttpJsonOperationsStub httpJsonOperationsStub; private final HttpJsonStubCallableFactory callableFactory; public static final HttpJsonCmekServiceStub create(CmekServiceStubSettings settings) throws IOException { return new HttpJsonCmekServiceStub(settings, ClientContext.create(settings)); } public static final HttpJsonCmekServiceStub create(ClientContext clientContext) throws IOException { return new HttpJsonCmekServiceStub( CmekServiceStubSettings.newHttpJsonBuilder().build(), clientContext); } public static final HttpJsonCmekServiceStub create( ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { return new HttpJsonCmekServiceStub( CmekServiceStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of HttpJsonCmekServiceStub, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected HttpJsonCmekServiceStub(CmekServiceStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new HttpJsonCmekServiceCallableFactory()); } /** * Constructs an instance of HttpJsonCmekServiceStub, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. 
*/ protected HttpJsonCmekServiceStub( CmekServiceStubSettings settings, ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.httpJsonOperationsStub = HttpJsonOperationsStub.create( clientContext, callableFactory, typeRegistry, ImmutableMap.<String, HttpRule>builder() .put( "google.longrunning.Operations.CancelOperation", HttpRule.newBuilder() .setPost("/v1/{name=projects/*/locations/*/operations/*}:cancel") .addAdditionalBindings( HttpRule.newBuilder() .setPost( "/v1/{name=organizations/*/locations/*/operations/*}:cancel") .build()) .build()) .put( "google.longrunning.Operations.DeleteOperation", HttpRule.newBuilder() .setDelete("/v1/{name=projects/*/locations/*/operations/*}") .addAdditionalBindings( HttpRule.newBuilder() .setDelete("/v1/{name=organizations/*/locations/*/operations/*}") .build()) .build()) .put( "google.longrunning.Operations.GetOperation", HttpRule.newBuilder() .setGet("/v1/{name=projects/*/locations/*/operations/*}") .addAdditionalBindings( HttpRule.newBuilder() .setGet("/v1/{name=organizations/*/locations/*/operations/*}") .build()) .build()) .put( "google.longrunning.Operations.ListOperations", HttpRule.newBuilder() .setGet("/v1/{name=projects/*/locations/*}/operations") .addAdditionalBindings( HttpRule.newBuilder() .setGet("/v1/{name=organizations/*/locations/*}/operations") .build()) .build()) .build()); HttpJsonCallSettings<CreateEncryptionConfigRequest, Operation> createEncryptionConfigTransportSettings = HttpJsonCallSettings.<CreateEncryptionConfigRequest, Operation>newBuilder() .setMethodDescriptor(createEncryptionConfigMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); HttpJsonCallSettings<UpdateEncryptionConfigRequest, Operation> 
updateEncryptionConfigTransportSettings = HttpJsonCallSettings.<UpdateEncryptionConfigRequest, Operation>newBuilder() .setMethodDescriptor(updateEncryptionConfigMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add( "encryption_config.name", String.valueOf(request.getEncryptionConfig().getName())); return builder.build(); }) .build(); HttpJsonCallSettings<DeleteEncryptionConfigRequest, Operation> deleteEncryptionConfigTransportSettings = HttpJsonCallSettings.<DeleteEncryptionConfigRequest, Operation>newBuilder() .setMethodDescriptor(deleteEncryptionConfigMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); HttpJsonCallSettings<ListEncryptionConfigsRequest, ListEncryptionConfigsResponse> listEncryptionConfigsTransportSettings = HttpJsonCallSettings .<ListEncryptionConfigsRequest, ListEncryptionConfigsResponse>newBuilder() .setMethodDescriptor(listEncryptionConfigsMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); HttpJsonCallSettings<GetEncryptionConfigRequest, EncryptionConfig> getEncryptionConfigTransportSettings = HttpJsonCallSettings.<GetEncryptionConfigRequest, EncryptionConfig>newBuilder() .setMethodDescriptor(getEncryptionConfigMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); HttpJsonCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings = 
HttpJsonCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setMethodDescriptor(listLocationsMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); HttpJsonCallSettings<GetLocationRequest, Location> getLocationTransportSettings = HttpJsonCallSettings.<GetLocationRequest, Location>newBuilder() .setMethodDescriptor(getLocationMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); this.createEncryptionConfigCallable = callableFactory.createUnaryCallable( createEncryptionConfigTransportSettings, settings.createEncryptionConfigSettings(), clientContext); this.createEncryptionConfigOperationCallable = callableFactory.createOperationCallable( createEncryptionConfigTransportSettings, settings.createEncryptionConfigOperationSettings(), clientContext, httpJsonOperationsStub); this.updateEncryptionConfigCallable = callableFactory.createUnaryCallable( updateEncryptionConfigTransportSettings, settings.updateEncryptionConfigSettings(), clientContext); this.updateEncryptionConfigOperationCallable = callableFactory.createOperationCallable( updateEncryptionConfigTransportSettings, settings.updateEncryptionConfigOperationSettings(), clientContext, httpJsonOperationsStub); this.deleteEncryptionConfigCallable = callableFactory.createUnaryCallable( deleteEncryptionConfigTransportSettings, settings.deleteEncryptionConfigSettings(), clientContext); this.deleteEncryptionConfigOperationCallable = callableFactory.createOperationCallable( deleteEncryptionConfigTransportSettings, settings.deleteEncryptionConfigOperationSettings(), clientContext, httpJsonOperationsStub); this.listEncryptionConfigsCallable = 
callableFactory.createUnaryCallable( listEncryptionConfigsTransportSettings, settings.listEncryptionConfigsSettings(), clientContext); this.listEncryptionConfigsPagedCallable = callableFactory.createPagedCallable( listEncryptionConfigsTransportSettings, settings.listEncryptionConfigsSettings(), clientContext); this.getEncryptionConfigCallable = callableFactory.createUnaryCallable( getEncryptionConfigTransportSettings, settings.getEncryptionConfigSettings(), clientContext); this.listLocationsCallable = callableFactory.createUnaryCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.listLocationsPagedCallable = callableFactory.createPagedCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.getLocationCallable = callableFactory.createUnaryCallable( getLocationTransportSettings, settings.getLocationSettings(), clientContext); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } @InternalApi public static List<ApiMethodDescriptor> getMethodDescriptors() { List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>(); methodDescriptors.add(createEncryptionConfigMethodDescriptor); methodDescriptors.add(updateEncryptionConfigMethodDescriptor); methodDescriptors.add(deleteEncryptionConfigMethodDescriptor); methodDescriptors.add(listEncryptionConfigsMethodDescriptor); methodDescriptors.add(getEncryptionConfigMethodDescriptor); methodDescriptors.add(listLocationsMethodDescriptor); methodDescriptors.add(getLocationMethodDescriptor); return methodDescriptors; } public HttpJsonOperationsStub getHttpJsonOperationsStub() { return httpJsonOperationsStub; } @Override public UnaryCallable<CreateEncryptionConfigRequest, Operation> createEncryptionConfigCallable() { return createEncryptionConfigCallable; } @Override public OperationCallable<CreateEncryptionConfigRequest, EncryptionConfig, OperationMetadata> createEncryptionConfigOperationCallable() 
{ return createEncryptionConfigOperationCallable; } @Override public UnaryCallable<UpdateEncryptionConfigRequest, Operation> updateEncryptionConfigCallable() { return updateEncryptionConfigCallable; } @Override public OperationCallable<UpdateEncryptionConfigRequest, EncryptionConfig, OperationMetadata> updateEncryptionConfigOperationCallable() { return updateEncryptionConfigOperationCallable; } @Override public UnaryCallable<DeleteEncryptionConfigRequest, Operation> deleteEncryptionConfigCallable() { return deleteEncryptionConfigCallable; } @Override public OperationCallable<DeleteEncryptionConfigRequest, Empty, OperationMetadata> deleteEncryptionConfigOperationCallable() { return deleteEncryptionConfigOperationCallable; } @Override public UnaryCallable<ListEncryptionConfigsRequest, ListEncryptionConfigsResponse> listEncryptionConfigsCallable() { return listEncryptionConfigsCallable; } @Override public UnaryCallable<ListEncryptionConfigsRequest, ListEncryptionConfigsPagedResponse> listEncryptionConfigsPagedCallable() { return listEncryptionConfigsPagedCallable; } @Override public UnaryCallable<GetEncryptionConfigRequest, EncryptionConfig> getEncryptionConfigCallable() { return getEncryptionConfigCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() { return listLocationsCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable() { return listLocationsPagedCallable; } @Override public UnaryCallable<GetLocationRequest, Location> getLocationCallable() { return getLocationCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public 
boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
googleapis/google-api-java-client-services
35,203
clients/google-api-services-firebasedynamiclinks/v1/2.0.0/com/google/api/services/firebasedynamiclinks/v1/FirebaseDynamicLinks.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.firebasedynamiclinks.v1; /** * Service definition for FirebaseDynamicLinks (v1). * * <p> * Programmatically creates and manages Firebase Dynamic Links. * </p> * * <p> * For more information about this service, see the * <a href="https://firebase.google.com/docs/dynamic-links/" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link FirebaseDynamicLinksRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class FirebaseDynamicLinks extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( (com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 || (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 && com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1))) || com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION >= 2, "You are currently running with version %s of google-api-client. 
" + "You need at least version 1.31.1 of google-api-client to run version " + "2.0.0 of the Firebase Dynamic Links API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://firebasedynamiclinks.googleapis.com/"; /** * The default encoded mTLS root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.31 */ public static final String DEFAULT_MTLS_ROOT_URL = "https://firebasedynamiclinks.mtls.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. 
* </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public FirebaseDynamicLinks(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ FirebaseDynamicLinks(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the ManagedShortLinks collection. * * <p>The typical use is:</p> * <pre> * {@code FirebaseDynamicLinks firebasedynamiclinks = new FirebaseDynamicLinks(...);} * {@code FirebaseDynamicLinks.ManagedShortLinks.List request = firebasedynamiclinks.managedShortLinks().list(parameters ...)} * </pre> * * @return the resource collection */ public ManagedShortLinks managedShortLinks() { return new ManagedShortLinks(); } /** * The "managedShortLinks" collection of methods. 
*/ public class ManagedShortLinks { /** * Creates a managed short Dynamic Link given either a valid long Dynamic Link or details such as * Dynamic Link domain, Android and iOS app information. The created short Dynamic Link will not * expire. This differs from CreateShortDynamicLink in the following ways: - The request will also * contain a name for the link (non unique name for the front end). - The response must be * authenticated with an auth token (generated with the admin service account). - The link will * appear in the FDL list of links in the console front end. The Dynamic Link domain in the request * must be owned by requester's Firebase project. * * Create a request for the method "managedShortLinks.create". * * This request holds the parameters needed by the firebasedynamiclinks server. After setting any * optional parameters, call the {@link Create#execute()} method to invoke the remote operation. * * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.CreateManagedShortLinkRequest} * @return the request */ public Create create(com.google.api.services.firebasedynamiclinks.v1.model.CreateManagedShortLinkRequest content) throws java.io.IOException { Create result = new Create(content); initialize(result); return result; } public class Create extends FirebaseDynamicLinksRequest<com.google.api.services.firebasedynamiclinks.v1.model.CreateManagedShortLinkResponse> { private static final String REST_PATH = "v1/managedShortLinks:create"; /** * Creates a managed short Dynamic Link given either a valid long Dynamic Link or details such as * Dynamic Link domain, Android and iOS app information. The created short Dynamic Link will not * expire. This differs from CreateShortDynamicLink in the following ways: - The request will also * contain a name for the link (non unique name for the front end). - The response must be * authenticated with an auth token (generated with the admin service account). 
- The link will * appear in the FDL list of links in the console front end. The Dynamic Link domain in the * request must be owned by requester's Firebase project. * * Create a request for the method "managedShortLinks.create". * * This request holds the parameters needed by the the firebasedynamiclinks server. After setting * any optional parameters, call the {@link Create#execute()} method to invoke the remote * operation. <p> {@link * Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.CreateManagedShortLinkRequest} * @since 1.13 */ protected Create(com.google.api.services.firebasedynamiclinks.v1.model.CreateManagedShortLinkRequest content) { super(FirebaseDynamicLinks.this, "POST", REST_PATH, content, com.google.api.services.firebasedynamiclinks.v1.model.CreateManagedShortLinkResponse.class); } @Override public Create set$Xgafv(java.lang.String $Xgafv) { return (Create) super.set$Xgafv($Xgafv); } @Override public Create setAccessToken(java.lang.String accessToken) { return (Create) super.setAccessToken(accessToken); } @Override public Create setAlt(java.lang.String alt) { return (Create) super.setAlt(alt); } @Override public Create setCallback(java.lang.String callback) { return (Create) super.setCallback(callback); } @Override public Create setFields(java.lang.String fields) { return (Create) super.setFields(fields); } @Override public Create setKey(java.lang.String key) { return (Create) super.setKey(key); } @Override public Create setOauthToken(java.lang.String oauthToken) { return (Create) super.setOauthToken(oauthToken); } @Override public Create setPrettyPrint(java.lang.Boolean prettyPrint) { return (Create) super.setPrettyPrint(prettyPrint); } @Override public Create setQuotaUser(java.lang.String quotaUser) { return (Create) 
super.setQuotaUser(quotaUser); } @Override public Create setUploadType(java.lang.String uploadType) { return (Create) super.setUploadType(uploadType); } @Override public Create setUploadProtocol(java.lang.String uploadProtocol) { return (Create) super.setUploadProtocol(uploadProtocol); } @Override public Create set(String parameterName, Object value) { return (Create) super.set(parameterName, value); } } } /** * An accessor for creating requests from the ShortLinks collection. * * <p>The typical use is:</p> * <pre> * {@code FirebaseDynamicLinks firebasedynamiclinks = new FirebaseDynamicLinks(...);} * {@code FirebaseDynamicLinks.ShortLinks.List request = firebasedynamiclinks.shortLinks().list(parameters ...)} * </pre> * * @return the resource collection */ public ShortLinks shortLinks() { return new ShortLinks(); } /** * The "shortLinks" collection of methods. */ public class ShortLinks { /** * Creates a short Dynamic Link given either a valid long Dynamic Link or details such as Dynamic * Link domain, Android and iOS app information. The created short Dynamic Link will not expire. * Repeated calls with the same long Dynamic Link or Dynamic Link information will produce the same * short Dynamic Link. The Dynamic Link domain in the request must be owned by requester's Firebase * project. * * Create a request for the method "shortLinks.create". * * This request holds the parameters needed by the firebasedynamiclinks server. After setting any * optional parameters, call the {@link Create#execute()} method to invoke the remote operation. 
* * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.CreateShortDynamicLinkRequest} * @return the request */ public Create create(com.google.api.services.firebasedynamiclinks.v1.model.CreateShortDynamicLinkRequest content) throws java.io.IOException { Create result = new Create(content); initialize(result); return result; } public class Create extends FirebaseDynamicLinksRequest<com.google.api.services.firebasedynamiclinks.v1.model.CreateShortDynamicLinkResponse> { private static final String REST_PATH = "v1/shortLinks"; /** * Creates a short Dynamic Link given either a valid long Dynamic Link or details such as Dynamic * Link domain, Android and iOS app information. The created short Dynamic Link will not expire. * Repeated calls with the same long Dynamic Link or Dynamic Link information will produce the * same short Dynamic Link. The Dynamic Link domain in the request must be owned by requester's * Firebase project. * * Create a request for the method "shortLinks.create". * * This request holds the parameters needed by the the firebasedynamiclinks server. After setting * any optional parameters, call the {@link Create#execute()} method to invoke the remote * operation. <p> {@link * Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. 
</p> * * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.CreateShortDynamicLinkRequest} * @since 1.13 */ protected Create(com.google.api.services.firebasedynamiclinks.v1.model.CreateShortDynamicLinkRequest content) { super(FirebaseDynamicLinks.this, "POST", REST_PATH, content, com.google.api.services.firebasedynamiclinks.v1.model.CreateShortDynamicLinkResponse.class); } @Override public Create set$Xgafv(java.lang.String $Xgafv) { return (Create) super.set$Xgafv($Xgafv); } @Override public Create setAccessToken(java.lang.String accessToken) { return (Create) super.setAccessToken(accessToken); } @Override public Create setAlt(java.lang.String alt) { return (Create) super.setAlt(alt); } @Override public Create setCallback(java.lang.String callback) { return (Create) super.setCallback(callback); } @Override public Create setFields(java.lang.String fields) { return (Create) super.setFields(fields); } @Override public Create setKey(java.lang.String key) { return (Create) super.setKey(key); } @Override public Create setOauthToken(java.lang.String oauthToken) { return (Create) super.setOauthToken(oauthToken); } @Override public Create setPrettyPrint(java.lang.Boolean prettyPrint) { return (Create) super.setPrettyPrint(prettyPrint); } @Override public Create setQuotaUser(java.lang.String quotaUser) { return (Create) super.setQuotaUser(quotaUser); } @Override public Create setUploadType(java.lang.String uploadType) { return (Create) super.setUploadType(uploadType); } @Override public Create setUploadProtocol(java.lang.String uploadProtocol) { return (Create) super.setUploadProtocol(uploadProtocol); } @Override public Create set(String parameterName, Object value) { return (Create) super.set(parameterName, value); } } } /** * An accessor for creating requests from the V1 collection. 
* * <p>The typical use is:</p> * <pre> * {@code FirebaseDynamicLinks firebasedynamiclinks = new FirebaseDynamicLinks(...);} * {@code FirebaseDynamicLinks.V1.List request = firebasedynamiclinks.v1().list(parameters ...)} * </pre> * * @return the resource collection */ public V1 v1() { return new V1(); } /** * The "v1" collection of methods. */ public class V1 { /** * Fetches analytics stats of a short Dynamic Link for a given duration. Metrics include number of * clicks, redirects, installs, app first opens, and app reopens. * * Create a request for the method "v1.getLinkStats". * * This request holds the parameters needed by the firebasedynamiclinks server. After setting any * optional parameters, call the {@link GetLinkStats#execute()} method to invoke the remote * operation. * * @param dynamicLink Dynamic Link URL. e.g. https://abcd.app.goo.gl/wxyz * @return the request */ public GetLinkStats getLinkStats(java.lang.String dynamicLink) throws java.io.IOException { GetLinkStats result = new GetLinkStats(dynamicLink); initialize(result); return result; } public class GetLinkStats extends FirebaseDynamicLinksRequest<com.google.api.services.firebasedynamiclinks.v1.model.DynamicLinkStats> { private static final String REST_PATH = "v1/{dynamicLink}/linkStats"; /** * Fetches analytics stats of a short Dynamic Link for a given duration. Metrics include number of * clicks, redirects, installs, app first opens, and app reopens. * * Create a request for the method "v1.getLinkStats". * * This request holds the parameters needed by the the firebasedynamiclinks server. After setting * any optional parameters, call the {@link GetLinkStats#execute()} method to invoke the remote * operation. <p> {@link * GetLinkStats#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param dynamicLink Dynamic Link URL. e.g. 
https://abcd.app.goo.gl/wxyz * @since 1.13 */ protected GetLinkStats(java.lang.String dynamicLink) { super(FirebaseDynamicLinks.this, "GET", REST_PATH, null, com.google.api.services.firebasedynamiclinks.v1.model.DynamicLinkStats.class); this.dynamicLink = com.google.api.client.util.Preconditions.checkNotNull(dynamicLink, "Required parameter dynamicLink must be specified."); } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public GetLinkStats set$Xgafv(java.lang.String $Xgafv) { return (GetLinkStats) super.set$Xgafv($Xgafv); } @Override public GetLinkStats setAccessToken(java.lang.String accessToken) { return (GetLinkStats) super.setAccessToken(accessToken); } @Override public GetLinkStats setAlt(java.lang.String alt) { return (GetLinkStats) super.setAlt(alt); } @Override public GetLinkStats setCallback(java.lang.String callback) { return (GetLinkStats) super.setCallback(callback); } @Override public GetLinkStats setFields(java.lang.String fields) { return (GetLinkStats) super.setFields(fields); } @Override public GetLinkStats setKey(java.lang.String key) { return (GetLinkStats) super.setKey(key); } @Override public GetLinkStats setOauthToken(java.lang.String oauthToken) { return (GetLinkStats) super.setOauthToken(oauthToken); } @Override public GetLinkStats setPrettyPrint(java.lang.Boolean prettyPrint) { return (GetLinkStats) super.setPrettyPrint(prettyPrint); } @Override public GetLinkStats setQuotaUser(java.lang.String quotaUser) { return (GetLinkStats) super.setQuotaUser(quotaUser); } @Override public GetLinkStats setUploadType(java.lang.String uploadType) { return (GetLinkStats) super.setUploadType(uploadType); } @Override public GetLinkStats setUploadProtocol(java.lang.String uploadProtocol) { return 
(GetLinkStats) super.setUploadProtocol(uploadProtocol); } /** Dynamic Link URL. e.g. https://abcd.app.goo.gl/wxyz */ @com.google.api.client.util.Key private java.lang.String dynamicLink; /** Dynamic Link URL. e.g. https://abcd.app.goo.gl/wxyz */ public java.lang.String getDynamicLink() { return dynamicLink; } /** Dynamic Link URL. e.g. https://abcd.app.goo.gl/wxyz */ public GetLinkStats setDynamicLink(java.lang.String dynamicLink) { this.dynamicLink = dynamicLink; return this; } /** The span of time requested in days. */ @com.google.api.client.util.Key private java.lang.Long durationDays; /** The span of time requested in days. */ public java.lang.Long getDurationDays() { return durationDays; } /** The span of time requested in days. */ public GetLinkStats setDurationDays(java.lang.Long durationDays) { this.durationDays = durationDays; return this; } /** Google SDK version. Version takes the form "$major.$minor.$patch" */ @com.google.api.client.util.Key private java.lang.String sdkVersion; /** Google SDK version. Version takes the form "$major.$minor.$patch" */ public java.lang.String getSdkVersion() { return sdkVersion; } /** Google SDK version. Version takes the form "$major.$minor.$patch" */ public GetLinkStats setSdkVersion(java.lang.String sdkVersion) { this.sdkVersion = sdkVersion; return this; } @Override public GetLinkStats set(String parameterName, Object value) { return (GetLinkStats) super.set(parameterName, value); } } /** * Get iOS strong/weak-match info for post-install attribution. * * Create a request for the method "v1.installAttribution". * * This request holds the parameters needed by the firebasedynamiclinks server. After setting any * optional parameters, call the {@link InstallAttribution#execute()} method to invoke the remote * operation. 
* * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.GetIosPostInstallAttributionRequest} * @return the request */ public InstallAttribution installAttribution(com.google.api.services.firebasedynamiclinks.v1.model.GetIosPostInstallAttributionRequest content) throws java.io.IOException { InstallAttribution result = new InstallAttribution(content); initialize(result); return result; } public class InstallAttribution extends FirebaseDynamicLinksRequest<com.google.api.services.firebasedynamiclinks.v1.model.GetIosPostInstallAttributionResponse> { private static final String REST_PATH = "v1/installAttribution"; /** * Get iOS strong/weak-match info for post-install attribution. * * Create a request for the method "v1.installAttribution". * * This request holds the parameters needed by the the firebasedynamiclinks server. After setting * any optional parameters, call the {@link InstallAttribution#execute()} method to invoke the * remote operation. <p> {@link InstallAttribution#initialize(com.google.api.client.googleapis.ser * vices.AbstractGoogleClientRequest)} must be called to initialize this instance immediately * after invoking the constructor. 
</p> * * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.GetIosPostInstallAttributionRequest} * @since 1.13 */ protected InstallAttribution(com.google.api.services.firebasedynamiclinks.v1.model.GetIosPostInstallAttributionRequest content) { super(FirebaseDynamicLinks.this, "POST", REST_PATH, content, com.google.api.services.firebasedynamiclinks.v1.model.GetIosPostInstallAttributionResponse.class); } @Override public InstallAttribution set$Xgafv(java.lang.String $Xgafv) { return (InstallAttribution) super.set$Xgafv($Xgafv); } @Override public InstallAttribution setAccessToken(java.lang.String accessToken) { return (InstallAttribution) super.setAccessToken(accessToken); } @Override public InstallAttribution setAlt(java.lang.String alt) { return (InstallAttribution) super.setAlt(alt); } @Override public InstallAttribution setCallback(java.lang.String callback) { return (InstallAttribution) super.setCallback(callback); } @Override public InstallAttribution setFields(java.lang.String fields) { return (InstallAttribution) super.setFields(fields); } @Override public InstallAttribution setKey(java.lang.String key) { return (InstallAttribution) super.setKey(key); } @Override public InstallAttribution setOauthToken(java.lang.String oauthToken) { return (InstallAttribution) super.setOauthToken(oauthToken); } @Override public InstallAttribution setPrettyPrint(java.lang.Boolean prettyPrint) { return (InstallAttribution) super.setPrettyPrint(prettyPrint); } @Override public InstallAttribution setQuotaUser(java.lang.String quotaUser) { return (InstallAttribution) super.setQuotaUser(quotaUser); } @Override public InstallAttribution setUploadType(java.lang.String uploadType) { return (InstallAttribution) super.setUploadType(uploadType); } @Override public InstallAttribution setUploadProtocol(java.lang.String uploadProtocol) { return (InstallAttribution) super.setUploadProtocol(uploadProtocol); } @Override public InstallAttribution set(String 
parameterName, Object value) { return (InstallAttribution) super.set(parameterName, value); } } /** * Get iOS reopen attribution for app universal link open deeplinking. * * Create a request for the method "v1.reopenAttribution". * * This request holds the parameters needed by the firebasedynamiclinks server. After setting any * optional parameters, call the {@link ReopenAttribution#execute()} method to invoke the remote * operation. * * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.GetIosReopenAttributionRequest} * @return the request */ public ReopenAttribution reopenAttribution(com.google.api.services.firebasedynamiclinks.v1.model.GetIosReopenAttributionRequest content) throws java.io.IOException { ReopenAttribution result = new ReopenAttribution(content); initialize(result); return result; } public class ReopenAttribution extends FirebaseDynamicLinksRequest<com.google.api.services.firebasedynamiclinks.v1.model.GetIosReopenAttributionResponse> { private static final String REST_PATH = "v1/reopenAttribution"; /** * Get iOS reopen attribution for app universal link open deeplinking. * * Create a request for the method "v1.reopenAttribution". * * This request holds the parameters needed by the the firebasedynamiclinks server. After setting * any optional parameters, call the {@link ReopenAttribution#execute()} method to invoke the * remote operation. <p> {@link ReopenAttribution#initialize(com.google.api.client.googleapis.serv * ices.AbstractGoogleClientRequest)} must be called to initialize this instance immediately after * invoking the constructor. 
</p> * * @param content the {@link com.google.api.services.firebasedynamiclinks.v1.model.GetIosReopenAttributionRequest} * @since 1.13 */ protected ReopenAttribution(com.google.api.services.firebasedynamiclinks.v1.model.GetIosReopenAttributionRequest content) { super(FirebaseDynamicLinks.this, "POST", REST_PATH, content, com.google.api.services.firebasedynamiclinks.v1.model.GetIosReopenAttributionResponse.class); } @Override public ReopenAttribution set$Xgafv(java.lang.String $Xgafv) { return (ReopenAttribution) super.set$Xgafv($Xgafv); } @Override public ReopenAttribution setAccessToken(java.lang.String accessToken) { return (ReopenAttribution) super.setAccessToken(accessToken); } @Override public ReopenAttribution setAlt(java.lang.String alt) { return (ReopenAttribution) super.setAlt(alt); } @Override public ReopenAttribution setCallback(java.lang.String callback) { return (ReopenAttribution) super.setCallback(callback); } @Override public ReopenAttribution setFields(java.lang.String fields) { return (ReopenAttribution) super.setFields(fields); } @Override public ReopenAttribution setKey(java.lang.String key) { return (ReopenAttribution) super.setKey(key); } @Override public ReopenAttribution setOauthToken(java.lang.String oauthToken) { return (ReopenAttribution) super.setOauthToken(oauthToken); } @Override public ReopenAttribution setPrettyPrint(java.lang.Boolean prettyPrint) { return (ReopenAttribution) super.setPrettyPrint(prettyPrint); } @Override public ReopenAttribution setQuotaUser(java.lang.String quotaUser) { return (ReopenAttribution) super.setQuotaUser(quotaUser); } @Override public ReopenAttribution setUploadType(java.lang.String uploadType) { return (ReopenAttribution) super.setUploadType(uploadType); } @Override public ReopenAttribution setUploadProtocol(java.lang.String uploadProtocol) { return (ReopenAttribution) super.setUploadProtocol(uploadProtocol); } @Override public ReopenAttribution set(String parameterName, Object value) { return 
(ReopenAttribution) super.set(parameterName, value); } } } /** * Builder for {@link FirebaseDynamicLinks}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) { // If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint. // If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS. // Use the regular endpoint for all other cases. String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT"); useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint; if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) { return DEFAULT_MTLS_ROOT_URL; } return DEFAULT_ROOT_URL; } /** * Returns an instance of a new builder. * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, 
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, Builder.chooseEndpoint(transport), DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link FirebaseDynamicLinks}. */ @Override public FirebaseDynamicLinks build() { return new FirebaseDynamicLinks(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link FirebaseDynamicLinksRequestInitializer}. 
* * @since 1.12 */ public Builder setFirebaseDynamicLinksRequestInitializer( FirebaseDynamicLinksRequestInitializer firebasedynamiclinksRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(firebasedynamiclinksRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } @Override public Builder setUniverseDomain(String universeDomain) { return (Builder) super.setUniverseDomain(universeDomain); } } }
googleads/google-ads-java
35,373
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/SuggestGeoTargetConstantsResponse.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/geo_target_constant_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * Response message for * [GeoTargetConstantService.SuggestGeoTargetConstants][google.ads.googleads.v19.services.GeoTargetConstantService.SuggestGeoTargetConstants]. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse} */ public final class SuggestGeoTargetConstantsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse) SuggestGeoTargetConstantsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use SuggestGeoTargetConstantsResponse.newBuilder() to construct. private SuggestGeoTargetConstantsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SuggestGeoTargetConstantsResponse() { geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new SuggestGeoTargetConstantsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v19_services_SuggestGeoTargetConstantsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v19_services_SuggestGeoTargetConstantsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse.class, 
com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse.Builder.class); } public static final int GEO_TARGET_CONSTANT_SUGGESTIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion> geoTargetConstantSuggestions_; /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion> getGeoTargetConstantSuggestionsList() { return geoTargetConstantSuggestions_; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v19.services.GeoTargetConstantSuggestionOrBuilder> getGeoTargetConstantSuggestionsOrBuilderList() { return geoTargetConstantSuggestions_; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public int getGeoTargetConstantSuggestionsCount() { return geoTargetConstantSuggestions_.size(); } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion getGeoTargetConstantSuggestions(int index) { return geoTargetConstantSuggestions_.get(index); } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.services.GeoTargetConstantSuggestionOrBuilder getGeoTargetConstantSuggestionsOrBuilder( int index) { return geoTargetConstantSuggestions_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < geoTargetConstantSuggestions_.size(); i++) { output.writeMessage(1, geoTargetConstantSuggestions_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < geoTargetConstantSuggestions_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, geoTargetConstantSuggestions_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse)) { return super.equals(obj); } com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse other = (com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse) obj; if (!getGeoTargetConstantSuggestionsList() .equals(other.getGeoTargetConstantSuggestionsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (getGeoTargetConstantSuggestionsCount() > 0) { hash = (37 * hash) + GEO_TARGET_CONSTANT_SUGGESTIONS_FIELD_NUMBER; hash = (53 * hash) + getGeoTargetConstantSuggestionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for * [GeoTargetConstantService.SuggestGeoTargetConstants][google.ads.googleads.v19.services.GeoTargetConstantService.SuggestGeoTargetConstants]. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse) com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v19_services_SuggestGeoTargetConstantsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v19_services_SuggestGeoTargetConstantsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse.class, com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse.Builder.class); } // Construct using com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (geoTargetConstantSuggestionsBuilder_ == null) { geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); } else 
{ geoTargetConstantSuggestions_ = null; geoTargetConstantSuggestionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v19_services_SuggestGeoTargetConstantsResponse_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse getDefaultInstanceForType() { return com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse build() { com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse buildPartial() { com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse result = new com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse result) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { geoTargetConstantSuggestions_ = java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_); bitField0_ = (bitField0_ & ~0x00000001); } result.geoTargetConstantSuggestions_ = geoTargetConstantSuggestions_; } else { result.geoTargetConstantSuggestions_ = geoTargetConstantSuggestionsBuilder_.build(); } } private void buildPartial0(com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse result) { int from_bitField0_ = 
bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse) { return mergeFrom((com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse other) { if (other == com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse.getDefaultInstance()) return this; if (geoTargetConstantSuggestionsBuilder_ == null) { if (!other.geoTargetConstantSuggestions_.isEmpty()) { if (geoTargetConstantSuggestions_.isEmpty()) { geoTargetConstantSuggestions_ = other.geoTargetConstantSuggestions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.addAll(other.geoTargetConstantSuggestions_); } onChanged(); } } else { if (!other.geoTargetConstantSuggestions_.isEmpty()) { if (geoTargetConstantSuggestionsBuilder_.isEmpty()) { 
geoTargetConstantSuggestionsBuilder_.dispose(); geoTargetConstantSuggestionsBuilder_ = null; geoTargetConstantSuggestions_ = other.geoTargetConstantSuggestions_; bitField0_ = (bitField0_ & ~0x00000001); geoTargetConstantSuggestionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getGeoTargetConstantSuggestionsFieldBuilder() : null; } else { geoTargetConstantSuggestionsBuilder_.addAllMessages(other.geoTargetConstantSuggestions_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion m = input.readMessage( com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.parser(), extensionRegistry); if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(m); } else { geoTargetConstantSuggestionsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion> geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); private void ensureGeoTargetConstantSuggestionsIsMutable() { if (!((bitField0_ & 0x00000001) 
!= 0)) { geoTargetConstantSuggestions_ = new java.util.ArrayList<com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion>(geoTargetConstantSuggestions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestionOrBuilder> geoTargetConstantSuggestionsBuilder_; /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion> getGeoTargetConstantSuggestionsList() { if (geoTargetConstantSuggestionsBuilder_ == null) { return java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_); } else { return geoTargetConstantSuggestionsBuilder_.getMessageList(); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public int getGeoTargetConstantSuggestionsCount() { if (geoTargetConstantSuggestionsBuilder_ == null) { return geoTargetConstantSuggestions_.size(); } else { return geoTargetConstantSuggestionsBuilder_.getCount(); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion getGeoTargetConstantSuggestions(int index) { if (geoTargetConstantSuggestionsBuilder_ == null) { return geoTargetConstantSuggestions_.get(index); } else { return geoTargetConstantSuggestionsBuilder_.getMessage(index); } } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder setGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion value) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.set(index, value); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.setMessage(index, value); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder setGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder builderForValue) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.set(index, builderForValue.build()); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions(com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion value) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(value); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(value); } return this; } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion value) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(index, value); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(index, value); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions( com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder builderForValue) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(builderForValue.build()); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder builderForValue) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(index, builderForValue.build()); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addAllGeoTargetConstantSuggestions( java.lang.Iterable<? extends com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion> values) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, geoTargetConstantSuggestions_); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addAllMessages(values); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder clearGeoTargetConstantSuggestions() { if (geoTargetConstantSuggestionsBuilder_ == null) { geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.clear(); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder removeGeoTargetConstantSuggestions(int index) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.remove(index); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.remove(index); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder getGeoTargetConstantSuggestionsBuilder( int index) { return getGeoTargetConstantSuggestionsFieldBuilder().getBuilder(index); } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v19.services.GeoTargetConstantSuggestionOrBuilder getGeoTargetConstantSuggestionsOrBuilder( int index) { if (geoTargetConstantSuggestionsBuilder_ == null) { return geoTargetConstantSuggestions_.get(index); } else { return geoTargetConstantSuggestionsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public java.util.List<? extends com.google.ads.googleads.v19.services.GeoTargetConstantSuggestionOrBuilder> getGeoTargetConstantSuggestionsOrBuilderList() { if (geoTargetConstantSuggestionsBuilder_ != null) { return geoTargetConstantSuggestionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder addGeoTargetConstantSuggestionsBuilder() { return getGeoTargetConstantSuggestionsFieldBuilder().addBuilder( com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.getDefaultInstance()); } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder addGeoTargetConstantSuggestionsBuilder( int index) { return getGeoTargetConstantSuggestionsFieldBuilder().addBuilder( index, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.getDefaultInstance()); } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v19.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder> getGeoTargetConstantSuggestionsBuilderList() { return getGeoTargetConstantSuggestionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestionOrBuilder> getGeoTargetConstantSuggestionsFieldBuilder() { if (geoTargetConstantSuggestionsBuilder_ == null) { geoTargetConstantSuggestionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v19.services.GeoTargetConstantSuggestionOrBuilder>( geoTargetConstantSuggestions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); geoTargetConstantSuggestions_ = null; } return geoTargetConstantSuggestionsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse) private static final com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse(); } public static com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> PARSER = new com.google.protobuf.AbstractParser<SuggestGeoTargetConstantsResponse>() { @java.lang.Override public SuggestGeoTargetConstantsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.services.SuggestGeoTargetConstantsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,373
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/SuggestGeoTargetConstantsResponse.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/geo_target_constant_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * Response message for * [GeoTargetConstantService.SuggestGeoTargetConstants][google.ads.googleads.v20.services.GeoTargetConstantService.SuggestGeoTargetConstants]. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse} */ public final class SuggestGeoTargetConstantsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse) SuggestGeoTargetConstantsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use SuggestGeoTargetConstantsResponse.newBuilder() to construct. private SuggestGeoTargetConstantsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SuggestGeoTargetConstantsResponse() { geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new SuggestGeoTargetConstantsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v20_services_SuggestGeoTargetConstantsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v20_services_SuggestGeoTargetConstantsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse.class, 
com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse.Builder.class); } public static final int GEO_TARGET_CONSTANT_SUGGESTIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion> geoTargetConstantSuggestions_; /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion> getGeoTargetConstantSuggestionsList() { return geoTargetConstantSuggestions_; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v20.services.GeoTargetConstantSuggestionOrBuilder> getGeoTargetConstantSuggestionsOrBuilderList() { return geoTargetConstantSuggestions_; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public int getGeoTargetConstantSuggestionsCount() { return geoTargetConstantSuggestions_.size(); } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion getGeoTargetConstantSuggestions(int index) { return geoTargetConstantSuggestions_.get(index); } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.services.GeoTargetConstantSuggestionOrBuilder getGeoTargetConstantSuggestionsOrBuilder( int index) { return geoTargetConstantSuggestions_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < geoTargetConstantSuggestions_.size(); i++) { output.writeMessage(1, geoTargetConstantSuggestions_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < geoTargetConstantSuggestions_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, geoTargetConstantSuggestions_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse)) { return super.equals(obj); } com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse other = (com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse) obj; if (!getGeoTargetConstantSuggestionsList() .equals(other.getGeoTargetConstantSuggestionsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (getGeoTargetConstantSuggestionsCount() > 0) { hash = (37 * hash) + GEO_TARGET_CONSTANT_SUGGESTIONS_FIELD_NUMBER; hash = (53 * hash) + getGeoTargetConstantSuggestionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for * [GeoTargetConstantService.SuggestGeoTargetConstants][google.ads.googleads.v20.services.GeoTargetConstantService.SuggestGeoTargetConstants]. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse) com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v20_services_SuggestGeoTargetConstantsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v20_services_SuggestGeoTargetConstantsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse.class, com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse.Builder.class); } // Construct using com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (geoTargetConstantSuggestionsBuilder_ == null) { geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); } else 
{ geoTargetConstantSuggestions_ = null; geoTargetConstantSuggestionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v20_services_SuggestGeoTargetConstantsResponse_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse build() { com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse buildPartial() { com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse result = new com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse result) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { geoTargetConstantSuggestions_ = java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_); bitField0_ = (bitField0_ & ~0x00000001); } result.geoTargetConstantSuggestions_ = geoTargetConstantSuggestions_; } else { result.geoTargetConstantSuggestions_ = geoTargetConstantSuggestionsBuilder_.build(); } } private void buildPartial0(com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse result) { int from_bitField0_ = 
bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse) { return mergeFrom((com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse other) { if (other == com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse.getDefaultInstance()) return this; if (geoTargetConstantSuggestionsBuilder_ == null) { if (!other.geoTargetConstantSuggestions_.isEmpty()) { if (geoTargetConstantSuggestions_.isEmpty()) { geoTargetConstantSuggestions_ = other.geoTargetConstantSuggestions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.addAll(other.geoTargetConstantSuggestions_); } onChanged(); } } else { if (!other.geoTargetConstantSuggestions_.isEmpty()) { if (geoTargetConstantSuggestionsBuilder_.isEmpty()) { 
geoTargetConstantSuggestionsBuilder_.dispose(); geoTargetConstantSuggestionsBuilder_ = null; geoTargetConstantSuggestions_ = other.geoTargetConstantSuggestions_; bitField0_ = (bitField0_ & ~0x00000001); geoTargetConstantSuggestionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getGeoTargetConstantSuggestionsFieldBuilder() : null; } else { geoTargetConstantSuggestionsBuilder_.addAllMessages(other.geoTargetConstantSuggestions_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion m = input.readMessage( com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.parser(), extensionRegistry); if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(m); } else { geoTargetConstantSuggestionsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion> geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); private void ensureGeoTargetConstantSuggestionsIsMutable() { if (!((bitField0_ & 0x00000001) 
!= 0)) { geoTargetConstantSuggestions_ = new java.util.ArrayList<com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion>(geoTargetConstantSuggestions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestionOrBuilder> geoTargetConstantSuggestionsBuilder_; /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion> getGeoTargetConstantSuggestionsList() { if (geoTargetConstantSuggestionsBuilder_ == null) { return java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_); } else { return geoTargetConstantSuggestionsBuilder_.getMessageList(); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public int getGeoTargetConstantSuggestionsCount() { if (geoTargetConstantSuggestionsBuilder_ == null) { return geoTargetConstantSuggestions_.size(); } else { return geoTargetConstantSuggestionsBuilder_.getCount(); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion getGeoTargetConstantSuggestions(int index) { if (geoTargetConstantSuggestionsBuilder_ == null) { return geoTargetConstantSuggestions_.get(index); } else { return geoTargetConstantSuggestionsBuilder_.getMessage(index); } } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder setGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion value) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.set(index, value); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.setMessage(index, value); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder setGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder builderForValue) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.set(index, builderForValue.build()); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions(com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion value) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(value); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(value); } return this; } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion value) { if (geoTargetConstantSuggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(index, value); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(index, value); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions( com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder builderForValue) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(builderForValue.build()); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addGeoTargetConstantSuggestions( int index, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder builderForValue) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.add(index, builderForValue.build()); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder addAllGeoTargetConstantSuggestions( java.lang.Iterable<? extends com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion> values) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, geoTargetConstantSuggestions_); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.addAllMessages(values); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder clearGeoTargetConstantSuggestions() { if (geoTargetConstantSuggestionsBuilder_ == null) { geoTargetConstantSuggestions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.clear(); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public Builder removeGeoTargetConstantSuggestions(int index) { if (geoTargetConstantSuggestionsBuilder_ == null) { ensureGeoTargetConstantSuggestionsIsMutable(); geoTargetConstantSuggestions_.remove(index); onChanged(); } else { geoTargetConstantSuggestionsBuilder_.remove(index); } return this; } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder getGeoTargetConstantSuggestionsBuilder( int index) { return getGeoTargetConstantSuggestionsFieldBuilder().getBuilder(index); } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.GeoTargetConstantSuggestionOrBuilder getGeoTargetConstantSuggestionsOrBuilder( int index) { if (geoTargetConstantSuggestionsBuilder_ == null) { return geoTargetConstantSuggestions_.get(index); } else { return geoTargetConstantSuggestionsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public java.util.List<? extends com.google.ads.googleads.v20.services.GeoTargetConstantSuggestionOrBuilder> getGeoTargetConstantSuggestionsOrBuilderList() { if (geoTargetConstantSuggestionsBuilder_ != null) { return geoTargetConstantSuggestionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_); } } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder addGeoTargetConstantSuggestionsBuilder() { return getGeoTargetConstantSuggestionsFieldBuilder().addBuilder( com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.getDefaultInstance()); } /** * <pre> * Geo target constant suggestions. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder addGeoTargetConstantSuggestionsBuilder( int index) { return getGeoTargetConstantSuggestionsFieldBuilder().addBuilder( index, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.getDefaultInstance()); } /** * <pre> * Geo target constant suggestions. * </pre> * * <code>repeated .google.ads.googleads.v20.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder> getGeoTargetConstantSuggestionsBuilderList() { return getGeoTargetConstantSuggestionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestionOrBuilder> getGeoTargetConstantSuggestionsFieldBuilder() { if (geoTargetConstantSuggestionsBuilder_ == null) { geoTargetConstantSuggestionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v20.services.GeoTargetConstantSuggestionOrBuilder>( geoTargetConstantSuggestions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); geoTargetConstantSuggestions_ = null; } return geoTargetConstantSuggestionsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse) private static final com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse(); } public static com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> PARSER = new com.google.protobuf.AbstractParser<SuggestGeoTargetConstantsResponse>() { @java.lang.Override public SuggestGeoTargetConstantsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.SuggestGeoTargetConstantsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,373
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/SuggestGeoTargetConstantsResponse.java
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/services/geo_target_constant_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;

/**
 * <pre>
 * Response message for
 * [GeoTargetConstantService.SuggestGeoTargetConstants][google.ads.googleads.v21.services.GeoTargetConstantService.SuggestGeoTargetConstants].
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse}
 */
public final class SuggestGeoTargetConstantsResponse extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse)
    SuggestGeoTargetConstantsResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use SuggestGeoTargetConstantsResponse.newBuilder() to construct.
  private SuggestGeoTargetConstantsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default (no-arg) construction: the single repeated field starts as an
  // immutable empty list so the default instance is fully immutable.
  private SuggestGeoTargetConstantsResponse() {
    geoTargetConstantSuggestions_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new SuggestGeoTargetConstantsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v21_services_SuggestGeoTargetConstantsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v21_services_SuggestGeoTargetConstantsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse.class, com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse.Builder.class);
  }

  public static final int GEO_TARGET_CONSTANT_SUGGESTIONS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion> geoTargetConstantSuggestions_;
  /**
   * <pre>
   * Geo target constant suggestions.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion> getGeoTargetConstantSuggestionsList() {
    return geoTargetConstantSuggestions_;
  }
  /**
   * <pre>
   * Geo target constant suggestions.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v21.services.GeoTargetConstantSuggestionOrBuilder>
      getGeoTargetConstantSuggestionsOrBuilderList() {
    return geoTargetConstantSuggestions_;
  }
  /**
   * <pre>
   * Geo target constant suggestions.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
   */
  @java.lang.Override
  public int getGeoTargetConstantSuggestionsCount() {
    return geoTargetConstantSuggestions_.size();
  }
  /**
   * <pre>
   * Geo target constant suggestions.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion getGeoTargetConstantSuggestions(int index) {
    return geoTargetConstantSuggestions_.get(index);
  }
  /**
   * <pre>
   * Geo target constant suggestions.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.services.GeoTargetConstantSuggestionOrBuilder getGeoTargetConstantSuggestionsOrBuilder(
      int index) {
    return geoTargetConstantSuggestions_.get(index);
  }

  // Cached initialization state: -1 = not yet computed, 0 = not initialized,
  // 1 = initialized. This message has no required fields, so it is always 1.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < geoTargetConstantSuggestions_.size(); i++) {
      output.writeMessage(1, geoTargetConstantSuggestions_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the wire size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < geoTargetConstantSuggestions_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, geoTargetConstantSuggestions_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse other = (com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse) obj;

    if (!getGeoTargetConstantSuggestionsList()
        .equals(other.getGeoTargetConstantSuggestionsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode caches the hash; 0 means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getGeoTargetConstantSuggestionsCount() > 0) {
      hash = (37 * hash) + GEO_TARGET_CONSTANT_SUGGESTIONS_FIELD_NUMBER;
      hash = (53 * hash) + getGeoTargetConstantSuggestionsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when converting the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Response message for
   * [GeoTargetConstantService.SuggestGeoTargetConstants][google.ads.googleads.v21.services.GeoTargetConstantService.SuggestGeoTargetConstants].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse)
      com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v21_services_SuggestGeoTargetConstantsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v21_services_SuggestGeoTargetConstantsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse.class, com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse.Builder.class);
    }

    // Construct using com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // The repeated field lives either in the plain list or in the nested
      // field builder, never both; reset whichever side is active.
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        geoTargetConstantSuggestions_ = java.util.Collections.emptyList();
      } else {
        geoTargetConstantSuggestions_ = null;
        geoTargetConstantSuggestionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.services.GeoTargetConstantServiceProto.internal_static_google_ads_googleads_v21_services_SuggestGeoTargetConstantsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse build() {
      com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse buildPartial() {
      com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse result = new com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse result) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        // Bit 0x00000001 marks the list as builder-owned/mutable; freeze it
        // before handing it to the immutable message.
        if (((bitField0_ & 0x00000001) != 0)) {
          geoTargetConstantSuggestions_ = java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.geoTargetConstantSuggestions_ = geoTargetConstantSuggestions_;
      } else {
        result.geoTargetConstantSuggestions_ = geoTargetConstantSuggestionsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse result) {
      // No singular fields in this message; the local below is generated
      // boilerplate and intentionally unused.
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse) {
        return mergeFrom((com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse other) {
      if (other == com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse.getDefaultInstance()) return this;
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        if (!other.geoTargetConstantSuggestions_.isEmpty()) {
          if (geoTargetConstantSuggestions_.isEmpty()) {
            // Copy-on-write: share the other message's immutable list until a
            // mutation forces a copy (mutable bit deliberately cleared).
            geoTargetConstantSuggestions_ = other.geoTargetConstantSuggestions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureGeoTargetConstantSuggestionsIsMutable();
            geoTargetConstantSuggestions_.addAll(other.geoTargetConstantSuggestions_);
          }
          onChanged();
        }
      } else {
        if (!other.geoTargetConstantSuggestions_.isEmpty()) {
          if (geoTargetConstantSuggestionsBuilder_.isEmpty()) {
            geoTargetConstantSuggestionsBuilder_.dispose();
            geoTargetConstantSuggestionsBuilder_ = null;
            geoTargetConstantSuggestions_ = other.geoTargetConstantSuggestions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            geoTargetConstantSuggestionsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getGeoTargetConstantSuggestionsFieldBuilder() : null;
          } else {
            geoTargetConstantSuggestionsBuilder_.addAllMessages(other.geoTargetConstantSuggestions_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion m =
                  input.readMessage(
                      com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.parser(),
                      extensionRegistry);
              if (geoTargetConstantSuggestionsBuilder_ == null) {
                ensureGeoTargetConstantSuggestionsIsMutable();
                geoTargetConstantSuggestions_.add(m);
              } else {
                geoTargetConstantSuggestionsBuilder_.addMessage(m);
              }
              break;
            } // case 10
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private java.util.List<com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion> geoTargetConstantSuggestions_ =
      java.util.Collections.emptyList();
    private void ensureGeoTargetConstantSuggestionsIsMutable() {
      // Promote the (possibly shared/immutable) list to a private ArrayList
      // the first time a mutation is requested; bit 0x00000001 records this.
      if (!((bitField0_ & 0x00000001) != 0)) {
        geoTargetConstantSuggestions_ = new java.util.ArrayList<com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion>(geoTargetConstantSuggestions_);
        bitField0_ |= 0x00000001;
       }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestionOrBuilder> geoTargetConstantSuggestionsBuilder_;

    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public java.util.List<com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion> getGeoTargetConstantSuggestionsList() {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_);
      } else {
        return geoTargetConstantSuggestionsBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public int getGeoTargetConstantSuggestionsCount() {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        return geoTargetConstantSuggestions_.size();
      } else {
        return geoTargetConstantSuggestionsBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion getGeoTargetConstantSuggestions(int index) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        return geoTargetConstantSuggestions_.get(index);
      } else {
        return geoTargetConstantSuggestionsBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder setGeoTargetConstantSuggestions(
        int index, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion value) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGeoTargetConstantSuggestionsIsMutable();
        geoTargetConstantSuggestions_.set(index, value);
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder setGeoTargetConstantSuggestions(
        int index, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder builderForValue) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        ensureGeoTargetConstantSuggestionsIsMutable();
        geoTargetConstantSuggestions_.set(index, builderForValue.build());
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder addGeoTargetConstantSuggestions(com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion value) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGeoTargetConstantSuggestionsIsMutable();
        geoTargetConstantSuggestions_.add(value);
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder addGeoTargetConstantSuggestions(
        int index, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion value) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGeoTargetConstantSuggestionsIsMutable();
        geoTargetConstantSuggestions_.add(index, value);
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder addGeoTargetConstantSuggestions(
        com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder builderForValue) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        ensureGeoTargetConstantSuggestionsIsMutable();
        geoTargetConstantSuggestions_.add(builderForValue.build());
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder addGeoTargetConstantSuggestions(
        int index, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder builderForValue) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        ensureGeoTargetConstantSuggestionsIsMutable();
        geoTargetConstantSuggestions_.add(index, builderForValue.build());
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder addAllGeoTargetConstantSuggestions(
        java.lang.Iterable<? extends com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion> values) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        ensureGeoTargetConstantSuggestionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, geoTargetConstantSuggestions_);
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder clearGeoTargetConstantSuggestions() {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        geoTargetConstantSuggestions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public Builder removeGeoTargetConstantSuggestions(int index) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        ensureGeoTargetConstantSuggestionsIsMutable();
        geoTargetConstantSuggestions_.remove(index);
        onChanged();
      } else {
        geoTargetConstantSuggestionsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder getGeoTargetConstantSuggestionsBuilder(
        int index) {
      return getGeoTargetConstantSuggestionsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public com.google.ads.googleads.v21.services.GeoTargetConstantSuggestionOrBuilder getGeoTargetConstantSuggestionsOrBuilder(
        int index) {
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        return geoTargetConstantSuggestions_.get(index);  } else {
        return geoTargetConstantSuggestionsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v21.services.GeoTargetConstantSuggestionOrBuilder>
         getGeoTargetConstantSuggestionsOrBuilderList() {
      if (geoTargetConstantSuggestionsBuilder_ != null) {
        return geoTargetConstantSuggestionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(geoTargetConstantSuggestions_);
      }
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder addGeoTargetConstantSuggestionsBuilder() {
      return getGeoTargetConstantSuggestionsFieldBuilder().addBuilder(
          com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.getDefaultInstance());
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder addGeoTargetConstantSuggestionsBuilder(
        int index) {
      return getGeoTargetConstantSuggestionsFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.getDefaultInstance());
    }
    /**
     * <pre>
     * Geo target constant suggestions.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.GeoTargetConstantSuggestion geo_target_constant_suggestions = 1;</code>
     */
    public java.util.List<com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder>
         getGeoTargetConstantSuggestionsBuilderList() {
      return getGeoTargetConstantSuggestionsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestionOrBuilder>
        getGeoTargetConstantSuggestionsFieldBuilder() {
      // Lazily switch from plain-list storage to a RepeatedFieldBuilderV3;
      // ownership of the list transfers to the builder (local list nulled).
      if (geoTargetConstantSuggestionsBuilder_ == null) {
        geoTargetConstantSuggestionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestion.Builder, com.google.ads.googleads.v21.services.GeoTargetConstantSuggestionOrBuilder>(
                geoTargetConstantSuggestions_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        geoTargetConstantSuggestions_ = null;
      }
      return geoTargetConstantSuggestionsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse)
  private static final com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse();
  }

  public static com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse>
      PARSER = new com.google.protobuf.AbstractParser<SuggestGeoTargetConstantsResponse>() {
    @java.lang.Override
    public SuggestGeoTargetConstantsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SuggestGeoTargetConstantsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v21.services.SuggestGeoTargetConstantsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
googleapis/google-cloud-java
35,365
java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3/stub/VersionsStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.cx.v3.stub; import static com.google.cloud.dialogflow.cx.v3.VersionsClient.ListLocationsPagedResponse; import static com.google.cloud.dialogflow.cx.v3.VersionsClient.ListVersionsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import 
com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest; import com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse; import com.google.cloud.dialogflow.cx.v3.CreateVersionOperationMetadata; import com.google.cloud.dialogflow.cx.v3.CreateVersionRequest; import com.google.cloud.dialogflow.cx.v3.DeleteVersionRequest; import com.google.cloud.dialogflow.cx.v3.GetVersionRequest; import com.google.cloud.dialogflow.cx.v3.ListVersionsRequest; import com.google.cloud.dialogflow.cx.v3.ListVersionsResponse; import com.google.cloud.dialogflow.cx.v3.LoadVersionRequest; import com.google.cloud.dialogflow.cx.v3.UpdateVersionRequest; import com.google.cloud.dialogflow.cx.v3.Version; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import com.google.protobuf.Struct; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link VersionsStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (dialogflow.googleapis.com) and default port (443) are used. 
* <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getVersion: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * VersionsStubSettings.Builder versionsSettingsBuilder = VersionsStubSettings.newBuilder(); * versionsSettingsBuilder * .getVersionSettings() * .setRetrySettings( * versionsSettingsBuilder * .getVersionSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * VersionsStubSettings versionsSettings = versionsSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. 
For example, to
 * configure the RetrySettings for createVersion:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * VersionsStubSettings.Builder versionsSettingsBuilder = VersionsStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * versionsSettingsBuilder
 *     .createVersionOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class VersionsStubSettings extends StubSettings<VersionsStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/dialogflow")
          .build();

  // Per-RPC settings snapshots, built once from the Builder in the constructor below.
  private final PagedCallSettings<
          ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
      listVersionsSettings;
  private final UnaryCallSettings<GetVersionRequest, Version> getVersionSettings;
  private final UnaryCallSettings<CreateVersionRequest, Operation> createVersionSettings;
  private final OperationCallSettings<CreateVersionRequest, Version, CreateVersionOperationMetadata>
      createVersionOperationSettings;
  private final UnaryCallSettings<UpdateVersionRequest, Version> updateVersionSettings;
  private final UnaryCallSettings<DeleteVersionRequest, Empty> deleteVersionSettings;
  private final UnaryCallSettings<LoadVersionRequest, Operation> loadVersionSettings;
  private final OperationCallSettings<LoadVersionRequest, Empty, Struct>
      loadVersionOperationSettings;
  private final UnaryCallSettings<CompareVersionsRequest, CompareVersionsResponse>
      compareVersionsSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;

  // Describes how to inject/extract page tokens and page sizes for the listVersions RPC,
  // so the paged-call machinery can iterate transparently over ListVersionsResponse pages.
  private static final PagedListDescriptor<ListVersionsRequest, ListVersionsResponse, Version>
      LIST_VERSIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListVersionsRequest, ListVersionsResponse, Version>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListVersionsRequest injectToken(ListVersionsRequest payload, String token) {
              return ListVersionsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListVersionsRequest injectPageSize(ListVersionsRequest payload, int pageSize) {
              return ListVersionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListVersionsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListVersionsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Version> extractResources(ListVersionsResponse payload) {
              return payload.getVersionsList();
            }
          };

  // Same pagination plumbing for the mixed-in Locations API (listLocations).
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };

  private static final PagedListResponseFactory<
          ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
      LIST_VERSIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>() {
            @Override
            public ApiFuture<ListVersionsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListVersionsRequest, ListVersionsResponse> callable,
                ListVersionsRequest request,
                ApiCallContext context,
                ApiFuture<ListVersionsResponse> futureResponse) {
              PageContext<ListVersionsRequest, ListVersionsResponse, Version> pageContext =
                  PageContext.create(callable, LIST_VERSIONS_PAGE_STR_DESC, request, context);
              return ListVersionsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to listVersions. */
  public PagedCallSettings<ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
      listVersionsSettings() {
    return listVersionsSettings;
  }

  /** Returns the object with the settings used for calls to getVersion. */
  public UnaryCallSettings<GetVersionRequest, Version> getVersionSettings() {
    return getVersionSettings;
  }

  /** Returns the object with the settings used for calls to createVersion. */
  public UnaryCallSettings<CreateVersionRequest, Operation> createVersionSettings() {
    return createVersionSettings;
  }

  /** Returns the object with the settings used for calls to createVersion. */
  public OperationCallSettings<CreateVersionRequest, Version, CreateVersionOperationMetadata>
      createVersionOperationSettings() {
    return createVersionOperationSettings;
  }

  /** Returns the object with the settings used for calls to updateVersion. */
  public UnaryCallSettings<UpdateVersionRequest, Version> updateVersionSettings() {
    return updateVersionSettings;
  }

  /** Returns the object with the settings used for calls to deleteVersion. */
  public UnaryCallSettings<DeleteVersionRequest, Empty> deleteVersionSettings() {
    return deleteVersionSettings;
  }

  /** Returns the object with the settings used for calls to loadVersion. */
  public UnaryCallSettings<LoadVersionRequest, Operation> loadVersionSettings() {
    return loadVersionSettings;
  }

  /** Returns the object with the settings used for calls to loadVersion. */
  public OperationCallSettings<LoadVersionRequest, Empty, Struct> loadVersionOperationSettings() {
    return loadVersionOperationSettings;
  }

  /** Returns the object with the settings used for calls to compareVersions. */
  public UnaryCallSettings<CompareVersionsRequest, CompareVersionsResponse>
      compareVersionsSettings() {
    return compareVersionsSettings;
  }

  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }

  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }

  /**
   * Creates the concrete transport-specific stub (gRPC or REST/JSON) matching the configured
   * {@link TransportChannelProvider}.
   *
   * @throws UnsupportedOperationException if the configured transport is neither gRPC nor httpjson
   */
  public VersionsStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcVersionsStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonVersionsStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "dialogflow";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "dialogflow.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dialogflow.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(VersionsStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(VersionsStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return VersionsStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected VersionsStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    listVersionsSettings = settingsBuilder.listVersionsSettings().build();
    getVersionSettings = settingsBuilder.getVersionSettings().build();
    createVersionSettings = settingsBuilder.createVersionSettings().build();
    createVersionOperationSettings = settingsBuilder.createVersionOperationSettings().build();
    updateVersionSettings = settingsBuilder.updateVersionSettings().build();
    deleteVersionSettings = settingsBuilder.deleteVersionSettings().build();
    loadVersionSettings = settingsBuilder.loadVersionSettings().build();
    loadVersionOperationSettings = settingsBuilder.loadVersionOperationSettings().build();
    compareVersionsSettings = settingsBuilder.compareVersionsSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }

  /** Builder for VersionsStubSettings. */
  public static class Builder extends StubSettings.Builder<VersionsStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
        listVersionsSettings;
    private final UnaryCallSettings.Builder<GetVersionRequest, Version> getVersionSettings;
    private final UnaryCallSettings.Builder<CreateVersionRequest, Operation> createVersionSettings;
    private final OperationCallSettings.Builder<
            CreateVersionRequest, Version, CreateVersionOperationMetadata>
        createVersionOperationSettings;
    private final UnaryCallSettings.Builder<UpdateVersionRequest, Version> updateVersionSettings;
    private final UnaryCallSettings.Builder<DeleteVersionRequest, Empty> deleteVersionSettings;
    private final UnaryCallSettings.Builder<LoadVersionRequest, Operation> loadVersionSettings;
    private final OperationCallSettings.Builder<LoadVersionRequest, Empty, Struct>
        loadVersionOperationSettings;
    private final UnaryCallSettings.Builder<CompareVersionsRequest, CompareVersionsResponse>
        compareVersionsSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;

    // Named retry policies from the service config; keys are looked up in initDefaults().
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      listVersionsSettings = PagedCallSettings.newBuilder(LIST_VERSIONS_PAGE_STR_FACT);
      getVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createVersionOperationSettings = OperationCallSettings.newBuilder();
      updateVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      loadVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      loadVersionOperationSettings = OperationCallSettings.newBuilder();
      compareVersionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listVersionsSettings,
              getVersionSettings,
              createVersionSettings,
              updateVersionSettings,
              deleteVersionSettings,
              loadVersionSettings,
              compareVersionsSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }

    protected Builder(VersionsStubSettings settings) {
      super(settings);

      listVersionsSettings = settings.listVersionsSettings.toBuilder();
      getVersionSettings = settings.getVersionSettings.toBuilder();
      createVersionSettings = settings.createVersionSettings.toBuilder();
      createVersionOperationSettings = settings.createVersionOperationSettings.toBuilder();
      updateVersionSettings = settings.updateVersionSettings.toBuilder();
      deleteVersionSettings = settings.deleteVersionSettings.toBuilder();
      loadVersionSettings = settings.loadVersionSettings.toBuilder();
      loadVersionOperationSettings = settings.loadVersionOperationSettings.toBuilder();
      compareVersionsSettings = settings.compareVersionsSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listVersionsSettings,
              getVersionSettings,
              createVersionSettings,
              updateVersionSettings,
              deleteVersionSettings,
              loadVersionSettings,
              compareVersionsSettings,
              listLocationsSettings,
              getLocationSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the retry policies above to every RPC, and configures LRO polling for the
    // two long-running methods (createVersion, loadVersion).
    private static Builder initDefaults(Builder builder) {
      builder
          .listVersionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .updateVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deleteVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .loadVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .compareVersionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createVersionOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<CreateVersionRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Version.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(
                  CreateVersionOperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      builder
          .loadVersionOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings.<LoadVersionRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Struct.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to listVersions. */
    public PagedCallSettings.Builder<
            ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
        listVersionsSettings() {
      return listVersionsSettings;
    }

    /** Returns the builder for the settings used for calls to getVersion. */
    public UnaryCallSettings.Builder<GetVersionRequest, Version> getVersionSettings() {
      return getVersionSettings;
    }

    /** Returns the builder for the settings used for calls to createVersion. */
    public UnaryCallSettings.Builder<CreateVersionRequest, Operation> createVersionSettings() {
      return createVersionSettings;
    }

    /** Returns the builder for the settings used for calls to createVersion. */
    public OperationCallSettings.Builder<
            CreateVersionRequest, Version, CreateVersionOperationMetadata>
        createVersionOperationSettings() {
      return createVersionOperationSettings;
    }

    /** Returns the builder for the settings used for calls to updateVersion. */
    public UnaryCallSettings.Builder<UpdateVersionRequest, Version> updateVersionSettings() {
      return updateVersionSettings;
    }

    /** Returns the builder for the settings used for calls to deleteVersion. */
    public UnaryCallSettings.Builder<DeleteVersionRequest, Empty> deleteVersionSettings() {
      return deleteVersionSettings;
    }

    /** Returns the builder for the settings used for calls to loadVersion. */
    public UnaryCallSettings.Builder<LoadVersionRequest, Operation> loadVersionSettings() {
      return loadVersionSettings;
    }

    /** Returns the builder for the settings used for calls to loadVersion. */
    public OperationCallSettings.Builder<LoadVersionRequest, Empty, Struct>
        loadVersionOperationSettings() {
      return loadVersionOperationSettings;
    }

    /** Returns the builder for the settings used for calls to compareVersions. */
    public UnaryCallSettings.Builder<CompareVersionsRequest, CompareVersionsResponse>
        compareVersionsSettings() {
      return compareVersionsSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    @Override
    public VersionsStubSettings build() throws IOException {
      return new VersionsStubSettings(this);
    }
  }
}
apache/ignite
35,083
modules/core/src/test/java/org/apache/ignite/testframework/junits/multijvm/IgniteProcessProxy.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.testframework.junits.multijvm; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import javax.cache.CacheException; import org.apache.ignite.DataRegionMetrics; import org.apache.ignite.DataRegionMetricsAdapter; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteAtomicLong; import org.apache.ignite.IgniteAtomicReference; import org.apache.ignite.IgniteAtomicSequence; import org.apache.ignite.IgniteAtomicStamped; import org.apache.ignite.IgniteBinary; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCompute; import org.apache.ignite.IgniteCountDownLatch; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.IgniteEncryption; import org.apache.ignite.IgniteEvents; import 
org.apache.ignite.IgniteException; import org.apache.ignite.IgniteIllegalStateException; import org.apache.ignite.IgniteLock; import org.apache.ignite.IgniteLogger; import org.apache.ignite.IgniteMessaging; import org.apache.ignite.IgniteQueue; import org.apache.ignite.IgniteScheduler; import org.apache.ignite.IgniteSemaphore; import org.apache.ignite.IgniteServices; import org.apache.ignite.IgniteSet; import org.apache.ignite.IgniteSnapshot; import org.apache.ignite.IgniteTransactions; import org.apache.ignite.Ignition; import org.apache.ignite.MemoryMetrics; import org.apache.ignite.cache.affinity.Affinity; import org.apache.ignite.cluster.ClusterGroup; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.AtomicConfiguration; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.CollectionConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.events.DiscoveryEvent; import org.apache.ignite.events.Event; import org.apache.ignite.events.EventType; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.cluster.IgniteClusterEx; import org.apache.ignite.internal.management.IgniteCommandRegistry; import org.apache.ignite.internal.processors.cache.GridCacheUtilityKey; import org.apache.ignite.internal.processors.cache.IgniteInternalCache; import org.apache.ignite.internal.util.GridJavaProcess; import org.apache.ignite.internal.util.lang.IgnitePredicateX; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import 
org.apache.ignite.lang.IgniteCallable; import org.apache.ignite.lang.IgniteInClosure; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.lang.IgniteProductVersion; import org.apache.ignite.lang.IgniteRunnable; import org.apache.ignite.metric.IgniteMetrics; import org.apache.ignite.plugin.IgnitePlugin; import org.apache.ignite.plugin.PluginNotFoundException; import org.apache.ignite.resources.IgniteInstanceResource; import org.apache.ignite.spi.tracing.TracingConfigurationManager; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import static org.junit.Assert.fail; /** * Ignite proxy for ignite instance at another JVM. */ @SuppressWarnings("TransientFieldInNonSerializableClass") public class IgniteProcessProxy implements IgniteEx { /** Grid proxies. */ private static final transient ConcurrentMap<String, IgniteProcessProxy> gridProxies = new ConcurrentHashMap<>(); /** Property that specify alternative {@code JAVA_HOME}. */ private static final String TEST_MULTIJVM_JAVA_HOME = "test.multijvm.java.home"; /** Waiting milliseconds of the left of a node to topology. */ private static final long NODE_LEFT_TIMEOUT = 30_000L; /** Jvm process with ignite instance. */ private final transient GridJavaProcess proc; /** Configuration. */ private final transient IgniteConfiguration cfg; /** Local JVM grid. */ private final transient Supplier<Ignite> locJvmGrid; /** Logger. */ private final transient IgniteLogger log; /** Grid id. */ private final UUID id; /** * @param cfg Configuration. * @param log Logger. * @param locJvmGrid Local JVM grid. * @throws Exception On error. */ public IgniteProcessProxy(IgniteConfiguration cfg, IgniteLogger log, Ignite locJvmGrid) throws Exception { this(cfg, log, locJvmGrid == null ? null : () -> locJvmGrid, true); } /** * @param cfg Configuration. * @param log Logger. * @param locJvmGrid Local JVM grid. * @throws Exception On error. 
*/ public IgniteProcessProxy(IgniteConfiguration cfg, IgniteLogger log, Supplier<Ignite> locJvmGrid, boolean discovery) throws Exception { this(cfg, log, locJvmGrid, discovery, Collections.emptyList()); } /** * @param cfg Configuration. * @param log Logger. * @param locJvmGrid Local JVM grid. * @param resetDiscovery Reset DiscoverySpi at the configuration. * @throws Exception On error. */ public IgniteProcessProxy( IgniteConfiguration cfg, IgniteLogger log, Supplier<Ignite> locJvmGrid, boolean resetDiscovery, List<String> additionalArgs ) throws Exception { this.cfg = cfg; this.id = cfg.getNodeId() == null ? UUID.randomUUID() : cfg.getNodeId(); this.locJvmGrid = locJvmGrid; this.log = logger(log, "jvm-" + id.toString().substring(0, id.toString().indexOf('-'))); final String javaHome = System.getProperty(TEST_MULTIJVM_JAVA_HOME); validateRemoteJre(javaHome); String params = params(cfg, resetDiscovery); Collection<String> filteredJvmArgs = filteredJvmArgs(); filteredJvmArgs.addAll(additionalArgs); final CountDownLatch rmtNodeStartedLatch = new CountDownLatch(1); if (locJvmGrid != null) locJvmGrid.get().events() .localListen(new NodeStartedListener(id, rmtNodeStartedLatch), EventType.EVT_NODE_JOINED); proc = GridJavaProcess.exec( igniteNodeRunnerClassName(), params, this.log, // Optional closure to be called each time wrapped process prints line to system.out or system.err. (IgniteInClosure<String>)this.log::info, null, javaHome, filteredJvmArgs, System.getProperty("surefire.test.class.path") ); if (locJvmGrid != null) assert rmtNodeStartedLatch.await(30, TimeUnit.SECONDS) : "Remote node has not joined [id=" + id + ']'; IgniteProcessProxy prevVal = gridProxies.putIfAbsent(cfg.getIgniteInstanceName(), this); if (prevVal != null) { remoteCompute().run(new StopGridTask(cfg.getIgniteInstanceName(), true)); throw new IllegalStateException("There was found instance assotiated with " + cfg.getIgniteInstanceName() + ", instance= " + prevVal + ". 
New started node was stopped."); } } /** * Validates that the JRE corresponding to the given Java home is valid for use as a remote JVM. * This currently means only checking that its major version matches the major version of the JRE we run on. * * @param javaHome Java home. * @throws IOException If I/O fails when interacting with 'java' process. * @throws InterruptedException If we get interrupted. */ private static void validateRemoteJre(@Nullable String javaHome) throws IOException, InterruptedException { int remoteMajorVer = new JavaVersionCommand().majorVersion(javaHome); int locMajorVer = U.majorJavaVersion(System.getProperty("java.version")); if (locMajorVer != remoteMajorVer) { fail("Version of remote java with home at '" + javaHome + "' (" + remoteMajorVer + ") is different from local java version (" + locMajorVer + "). " + "Make sure test.multijvm.java.home property specifies a path to a correct Java installation"); } } /** * Creates new logger instance based on given logger and given category. * * @param log Base logger. * @param ctgr Category. * @return Initiated logger. * @throws Exception In case of an error. */ protected IgniteLogger logger(IgniteLogger log, Object ctgr) throws Exception { return log.getLogger(ctgr); } /** * Gets Ignite node runner class name. * * @return Node runner class name. * @throws Exception In case of an error. */ protected String igniteNodeRunnerClassName() throws Exception { return IgniteNodeRunner.class.getCanonicalName(); } /** * Creates parameters which will be passed to new Ignite Process as command line arguments. * * @param cfg Configuration. * @param resetDiscovery Reset DiscoverySpi at the configuration. * @return Params to be passed to new Ignite process. * @throws Exception In case of an error. 
*/ protected String params(IgniteConfiguration cfg, boolean resetDiscovery) throws Exception { return IgniteNodeRunner.storeToFile(cfg.setNodeId(id), resetDiscovery); } /** * Creates list of JVM arguments to be used to start new Ignite process in separate JVM. * * @return JVM arguments. * @throws Exception In case of an error. */ protected Collection<String> filteredJvmArgs() throws Exception { Collection<String> filteredJvmArgs = new ArrayList<>(); filteredJvmArgs.add("-ea"); for (String arg : U.jvmArgs()) { if (arg.startsWith("-Xmx") || arg.startsWith("-Xms") || arg.startsWith("-cp") || arg.startsWith("-classpath") || arg.startsWith("--add-opens") || arg.startsWith("--add-exports") || arg.startsWith("--add-modules") || arg.startsWith("--patch-module") || arg.startsWith("--add-reads") || arg.startsWith("-XX:+IgnoreUnrecognizedVMOptions")) filteredJvmArgs.add(arg); } return filteredJvmArgs; } /** */ private static class NodeStartedListener extends IgnitePredicateX<Event> { /** Id. */ private final UUID id; /** Remote node started latch. */ private final CountDownLatch rmtNodeStartedLatch; /** * @param id Id. * @param rmtNodeStartedLatch Remote node started latch. */ NodeStartedListener(UUID id, CountDownLatch rmtNodeStartedLatch) { this.id = id; this.rmtNodeStartedLatch = rmtNodeStartedLatch; } /** {@inheritDoc} */ @Override public boolean applyx(Event e) { if (((DiscoveryEvent)e).eventNode().id().equals(id)) { rmtNodeStartedLatch.countDown(); return false; } return true; } } /** * @param igniteInstanceName Ignite instance name. * @return Instance by name or exception wiil be thrown. */ public static IgniteProcessProxy ignite(String igniteInstanceName) { IgniteProcessProxy res = gridProxies.get(igniteInstanceName); if (res == null) throw new IgniteIllegalStateException("Grid instance was not properly started " + "or was already stopped: " + igniteInstanceName + ". 
All known grid instances: " + gridProxies.keySet()); return res; } /** * Gracefully shut down the Grid. * * @param igniteInstanceName Ignite instance name. * @param cancel If {@code true} then all jobs currently will be cancelled. * @throws Exception In case of the node stopping error. */ public static void stop(String igniteInstanceName, boolean cancel) throws Exception { final IgniteProcessProxy proxy = gridProxies.get(igniteInstanceName); if (proxy != null) { final CountDownLatch rmtNodeStoppedLatch = new CountDownLatch(1); final UUID rmNodeId = proxy.getId(); proxy.localJvmGrid().events().localListen(new IgnitePredicateX<Event>() { @Override public boolean applyx(Event e) { if (((DiscoveryEvent)e).eventNode().id().equals(rmNodeId)) { rmtNodeStoppedLatch.countDown(); return false; } return true; } }, EventType.EVT_NODE_LEFT); try { proxy.remoteCompute().runAsync(new StopGridTask(igniteInstanceName, cancel)); if (!rmtNodeStoppedLatch.await(NODE_LEFT_TIMEOUT, TimeUnit.MILLISECONDS)) throw new IllegalStateException("Remote node has not stopped [id=" + rmNodeId + ']'); } catch (Throwable t) { proxy.log().error("Failed to stop grid [igniteInstanceName=" + igniteInstanceName + ", cancel=" + cancel + ']', t); throw t; } proxy.getProcess().kill(); gridProxies.remove(igniteInstanceName, proxy); } } /** * Forcefully shut down the Grid. * * @param igniteInstanceName Ignite instance name. */ public static void kill(String igniteInstanceName) { A.notNull(igniteInstanceName, "igniteInstanceName"); IgniteProcessProxy proxy = gridProxies.get(igniteInstanceName); if (proxy == null) return; if (proxy == null) return; try { proxy.getProcess().kill(); } catch (Exception e) { U.error(proxy.log, "Exception while killing " + igniteInstanceName, e); } gridProxies.remove(igniteInstanceName, proxy); } /** * @param locNodeId ID of local node the requested grid instance is managing. * @return An instance of named grid. This method never returns {@code null}. 
* @throws IgniteIllegalStateException Thrown if grid was not properly initialized or grid instance was stopped or * was not started. */ public static Ignite ignite(UUID locNodeId) { A.notNull(locNodeId, "locNodeId"); for (IgniteProcessProxy ignite : gridProxies.values()) { if (ignite.getId().equals(locNodeId)) return ignite; } throw new IgniteIllegalStateException("Grid instance with given local node ID was not properly " + "started or was stopped: " + locNodeId); } /** * Kill all running processes. */ public static void killAll() { for (IgniteProcessProxy ignite : gridProxies.values()) { try { ignite.getProcess().kill(); } catch (Exception e) { U.error(ignite.log, "Killing failed.", e); } } gridProxies.clear(); } /** * @return Local JVM grid instance. */ public Ignite localJvmGrid() { return locJvmGrid.get(); } /** * @return Grid id. */ public UUID getId() { return id; } /** * @throws Exception If failed to kill. */ public void kill() throws Exception { getProcess().kill(); gridProxies.remove(cfg.getGridName(), this); } /** {@inheritDoc} */ @Override public String name() { return cfg.getIgniteInstanceName(); } /** {@inheritDoc} */ @Override public IgniteLogger log() { return log; } /** {@inheritDoc} */ @Override public IgniteConfiguration configuration() { return cfg; } /** {@inheritDoc} */ @Override public <K extends GridCacheUtilityKey, V> IgniteInternalCache<K, V> utilityCache() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Nullable @Override public <K, V> IgniteInternalCache<K, V> cachex(@Nullable String name) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public Collection<IgniteInternalCache<?, ?>> cachesx( @Nullable IgnitePredicate<? super IgniteInternalCache<?, ?>>... 
p) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public boolean eventUserRecordable(int type) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public boolean allEventsUserRecordable(int[] types) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public boolean isJmxRemoteEnabled() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public boolean isRestartEnabled() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteClusterEx cluster() { return new IgniteClusterProcessProxy(this); } /** {@inheritDoc} */ @Nullable @Override public String latestVersion() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public ClusterNode localNode() { return remoteCompute().call(new NodeTask()); } /** {@inheritDoc} */ @Override public GridKernalContext context() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public boolean isRebalanceEnabled() { return true; } /** {@inheritDoc} */ @Override public void rebalanceEnabled(boolean rebalanceEnabled) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T> IgniteSet<T> set(String name, int cacheId, boolean collocated, boolean separated) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteCommandRegistry commandsRegistry() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteCompute compute() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} 
*/ @Override public IgniteMetrics metrics() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteCompute compute(ClusterGroup grp) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteMessaging message() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteMessaging message(ClusterGroup grp) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteEvents events() { return new IgniteEventsProcessProxy(this); } /** {@inheritDoc} */ @Override public IgniteEvents events(ClusterGroup grp) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteServices services() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteServices services(ClusterGroup grp) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public ExecutorService executorService() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public ExecutorService executorService(ClusterGroup grp) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteProductVersion version() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteScheduler scheduler() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> createCache(CacheConfiguration<K, V> cacheCfg) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public 
Collection<IgniteCache> createCaches(Collection<CacheConfiguration> cacheCfgs) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> createCache(String cacheName) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> getOrCreateCache(CacheConfiguration<K, V> cacheCfg) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> getOrCreateCache(String cacheName) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public Collection<IgniteCache> getOrCreateCaches(Collection<CacheConfiguration> cacheCfgs) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> void addCacheConfiguration(CacheConfiguration<K, V> cacheCfg) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> createCache(CacheConfiguration<K, V> cacheCfg, NearCacheConfiguration<K, V> nearCfg) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> getOrCreateCache(CacheConfiguration<K, V> cacheCfg, NearCacheConfiguration<K, V> nearCfg) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> createNearCache( @Nullable String cacheName, NearCacheConfiguration<K, V> nearCfg) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> getOrCreateNearCache(@Nullable String cacheName, NearCacheConfiguration<K, V> nearCfg) { throw new UnsupportedOperationException("Operation isn't supported yet."); 
} /** {@inheritDoc} */ @Override public <K, V> IgniteBiTuple<IgniteCache<K, V>, Boolean> getOrCreateCache0( CacheConfiguration<K, V> cacheCfg, boolean sql) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public void destroyCache(String cacheName) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public boolean destroyCache0(String cacheName, boolean sql) throws CacheException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public void destroyCaches(Collection<String> cacheNames) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <K, V> IgniteCache<K, V> cache(@Nullable final String name) { return new IgniteCacheProcessProxy<>(name, this); } /** {@inheritDoc} */ @Override public Collection<String> cacheNames() { return localJvmGrid().cacheNames(); } /** {@inheritDoc} */ @Override public IgniteTransactions transactions() { throw new UnsupportedOperationException("Transactions can't be supported automatically in multi JVM mode."); } /** {@inheritDoc} */ @Override public <K, V> IgniteDataStreamer<K, V> dataStreamer(@Nullable String cacheName) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteAtomicSequence atomicSequence(String name, long initVal, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteAtomicSequence atomicSequence(String name, AtomicConfiguration cfg, long initVal, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteAtomicLong atomicLong(String name, long initVal, boolean create) throws IgniteException { throw new 
UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteAtomicLong atomicLong(String name, AtomicConfiguration cfg, long initVal, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T> IgniteAtomicReference<T> atomicReference(String name, @Nullable T initVal, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T> IgniteAtomicReference<T> atomicReference(String name, AtomicConfiguration cfg, @Nullable T initVal, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T, S> IgniteAtomicStamped<T, S> atomicStamped( String name, @Nullable T initVal, @Nullable S initStamp, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T, S> IgniteAtomicStamped<T, S> atomicStamped(String name, AtomicConfiguration cfg, @Nullable T initVal, @Nullable S initStamp, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteCountDownLatch countDownLatch(String name, int cnt, boolean autoDel, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteSemaphore semaphore(String name, int cnt, boolean failoverSafe, boolean create) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteLock reentrantLock(String name, boolean failoverSafe, boolean fair, boolean create) throws IgniteException { throw new 
UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T> IgniteQueue<T> queue(String name, int cap, @Nullable CollectionConfiguration cfg) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T> IgniteSet<T> set(String name, @Nullable CollectionConfiguration cfg) throws IgniteException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public <T extends IgnitePlugin> T plugin(String name) throws PluginNotFoundException { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteBinary binary() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public void resetLostPartitions(Collection<String> cacheNames) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public Collection<DataRegionMetrics> dataRegionMetrics() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Nullable @Override public DataRegionMetrics dataRegionMetrics(String memPlcName) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteEncryption encryption() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public IgniteSnapshot snapshot() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public Collection<MemoryMetrics> memoryMetrics() { return DataRegionMetricsAdapter.collectionOf(dataRegionMetrics()); } /** {@inheritDoc} */ @Nullable @Override public MemoryMetrics memoryMetrics(String memPlcName) { return DataRegionMetricsAdapter.valueOf(dataRegionMetrics(memPlcName)); } /** {@inheritDoc} */ 
@Override public @NotNull TracingConfigurationManager tracingConfiguration() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public Ignite withApplicationAttributes(Map<String, String> attrs) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public void close() throws IgniteException { if (localJvmGrid() != null) { final CountDownLatch rmtNodeStoppedLatch = new CountDownLatch(1); localJvmGrid().events().localListen(new IgnitePredicateX<Event>() { @Override public boolean applyx(Event e) { if (((DiscoveryEvent)e).eventNode().id().equals(id)) { rmtNodeStoppedLatch.countDown(); return false; } return true; } }, EventType.EVT_NODE_LEFT, EventType.EVT_NODE_FAILED); compute().run(new StopGridTask(localJvmGrid().name(), true)); try { assert U.await(rmtNodeStoppedLatch, 15, TimeUnit.SECONDS) : "NodeId=" + id; } catch (IgniteInterruptedCheckedException e) { throw new IgniteException(e); } } try { getProcess().kill(); } catch (Exception e) { X.printerr("Could not kill process after close.", e); } } /** {@inheritDoc} */ @Override public <K> Affinity<K> affinity(String cacheName) { return new AffinityProcessProxy<>(cacheName, this); } /** {@inheritDoc} */ @Override public boolean active() { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** {@inheritDoc} */ @Override public void active(boolean active) { throw new UnsupportedOperationException("Operation isn't supported yet."); } /** * @return Jvm process in which grid node started. */ public GridJavaProcess getProcess() { return proc; } /** * @return {@link IgniteCompute} instance to communicate with remote node. 
*/ public IgniteCompute remoteCompute() { Ignite locJvmGrid = localJvmGrid(); if (locJvmGrid == null) return null; ClusterGroup grp = locJvmGrid.cluster().forNodeId(id); if (grp.nodes().isEmpty()) throw new IllegalStateException("Could not found node with id=" + id + "."); return locJvmGrid.compute(grp); } /** * Executes {@link Ignition#stop(String, boolean)} with given arguments in a separated thread, doesn't wait up the * fulfillment. */ private static class StopGridTask implements IgniteRunnable { /** Ignite instance name. */ private final String igniteInstanceName; /** Cancel. */ private final boolean cancel; /** * @param igniteInstanceName Ignite instance name. * @param cancel Cancel. */ public StopGridTask(String igniteInstanceName, boolean cancel) { this.igniteInstanceName = igniteInstanceName; this.cancel = cancel; } /** {@inheritDoc} */ @Override public void run() { CompletableFuture.runAsync(() -> G.stop(igniteInstanceName, cancel)); } } /** * */ private static class NodeTask implements IgniteCallable<ClusterNode> { /** Ignite. */ @IgniteInstanceResource private Ignite ignite; /** {@inheritDoc} */ @Override public ClusterNode call() throws Exception { return ((IgniteEx)ignite).localNode(); } } }
apache/ozone
35,335
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestBlockDeletion.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.container.common.statemachine.commandhandler; import static java.lang.Math.max; import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_COMMAND_STATUS_REPORT_INTERVAL; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_CONTAINER_REPORT_INTERVAL; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_HEARTBEAT_INTERVAL; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_EXPIRED_CONTAINER_REPLICA_OP_SCRUB_INTERVAL; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_PIPELINE_OWNER_CONTAINER_COUNT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL; import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static 
org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import org.apache.commons.lang3.RandomStringUtils; import org.apache.hadoop.hdds.client.ECReplicationConfig; import org.apache.hadoop.hdds.client.RatisReplicationConfig; import org.apache.hadoop.hdds.client.ReplicationConfig; import org.apache.hadoop.hdds.client.ReplicationFactor; import org.apache.hadoop.hdds.client.ReplicationType; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos; import org.apache.hadoop.hdds.ratis.RatisHelper; import org.apache.hadoop.hdds.scm.ScmConfig; import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.hdds.scm.block.DeletedBlockLogImpl; import org.apache.hadoop.hdds.scm.block.SCMBlockDeletingService; import org.apache.hadoop.hdds.scm.block.ScmBlockDeletingServiceMetrics; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.ContainerInfo; import org.apache.hadoop.hdds.scm.container.ContainerReplica; import org.apache.hadoop.hdds.scm.container.ContainerStateManager; import org.apache.hadoop.hdds.scm.container.replication.ReplicationManager; import org.apache.hadoop.hdds.scm.server.StorageContainerManager; import org.apache.hadoop.hdds.server.events.EventQueue; import org.apache.hadoop.hdds.utils.IOUtils; import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.ozone.HddsDatanodeService; import org.apache.hadoop.ozone.MiniOzoneCluster; import 
org.apache.hadoop.ozone.OzoneTestUtils; import org.apache.hadoop.ozone.client.ObjectStore; import org.apache.hadoop.ozone.client.OzoneBucket; import org.apache.hadoop.ozone.client.OzoneClient; import org.apache.hadoop.ozone.client.OzoneVolume; import org.apache.hadoop.ozone.client.io.OzoneOutputStream; import org.apache.hadoop.ozone.container.TestHelper; import org.apache.hadoop.ozone.container.common.helpers.BlockData; import org.apache.hadoop.ozone.container.common.impl.ContainerData; import org.apache.hadoop.ozone.container.common.impl.ContainerSet; import org.apache.hadoop.ozone.container.common.interfaces.Container; import org.apache.hadoop.ozone.container.common.interfaces.DBHandle; import org.apache.hadoop.ozone.container.common.statemachine.DatanodeConfiguration; import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData; import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils; import org.apache.hadoop.ozone.om.OzoneManager; import org.apache.hadoop.ozone.om.helpers.OmKeyArgs; import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfoGroup; import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol; import org.apache.hadoop.util.Time; import org.apache.ozone.test.GenericTestUtils; import org.apache.ozone.test.GenericTestUtils.LogCapturer; import org.apache.ozone.test.tag.Flaky; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.event.Level; /** * Tests for Block deletion. 
*/
public class TestBlockDeletion {

  private static final Logger LOG =
      LoggerFactory.getLogger(TestBlockDeletion.class);

  // Cluster-wide handles wired up in init() and torn down in cleanup().
  private OzoneConfiguration conf = null;
  private ObjectStore store;
  private MiniOzoneCluster cluster = null;
  private StorageContainerManager scm = null;
  private OzoneManager om = null;
  private OzoneManagerProtocol writeClient;
  // Container IDs observed to hold deleted blocks; populated by
  // verifyBlocksDeleted() and consumed by matchContainerTransactionIds().
  private Set<Long> containerIdsWithDeletedBlocks;
  // Highest delete-transaction id seen so far; updated in
  // matchContainerTransactionIds() and checked in verifyTransactionsCommitted().
  private long maxTransactionId = 0;
  private ScmBlockDeletingServiceMetrics metrics;
  private OzoneClient client;

  /**
   * Spins up a 3-datanode MiniOzoneCluster with aggressive (100ms) block
   * deletion intervals and short heartbeat/report intervals so that block
   * deletion propagates quickly enough for the tests below to observe it.
   */
  @BeforeEach
  public void init() throws Exception {
    conf = new OzoneConfiguration();
    GenericTestUtils.setLogLevel(DeletedBlockLogImpl.class, Level.DEBUG);
    GenericTestUtils.setLogLevel(SCMBlockDeletingService.class, Level.DEBUG);
    GenericTestUtils.setLogLevel(ReplicationManager.class, Level.DEBUG);
    // Restrict replication configs to the two flavors exercised by
    // replicationConfigs(): Ratis THREE and EC rs-2-1-256k.
    conf.set("ozone.replication.allowed-configs",
        "^(RATIS/THREE)|(EC/2-1-256k)$");
    conf.setTimeDuration(OZONE_BLOCK_DELETING_SERVICE_INTERVAL, 100,
        TimeUnit.MILLISECONDS);
    DatanodeConfiguration datanodeConfiguration = conf.getObject(
        DatanodeConfiguration.class);
    datanodeConfiguration.setBlockDeletionInterval(Duration.ofMillis(100));
    conf.setFromObject(datanodeConfiguration);
    ScmConfig scmConfig = conf.getObject(ScmConfig.class);
    scmConfig.setBlockDeletionInterval(Duration.ofMillis(100));
    conf.setFromObject(scmConfig);
    conf.setTimeDuration(RatisHelper.HDDS_DATANODE_RATIS_PREFIX_KEY
        + ".client.request.write.timeout", 30, TimeUnit.SECONDS);
    conf.setTimeDuration(RatisHelper.HDDS_DATANODE_RATIS_PREFIX_KEY
        + ".client.request.watch.timeout", 30, TimeUnit.SECONDS);
    conf.setTimeDuration(HDDS_HEARTBEAT_INTERVAL, 50,
        TimeUnit.MILLISECONDS);
    conf.setTimeDuration(HDDS_CONTAINER_REPORT_INTERVAL, 200,
        TimeUnit.MILLISECONDS);
    conf.setTimeDuration(HDDS_COMMAND_STATUS_REPORT_INTERVAL, 200,
        TimeUnit.MILLISECONDS);
    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, TimeUnit.SECONDS);
    conf.setBoolean(ScmConfigKeys.OZONE_SCM_PIPELINE_AUTO_CREATE_FACTOR_ONE,
        false);
    conf.setTimeDuration(OZONE_SCM_EXPIRED_CONTAINER_REPLICA_OP_SCRUB_INTERVAL,
        100, TimeUnit.MILLISECONDS);
    conf.setInt(OZONE_SCM_PIPELINE_OWNER_CONTAINER_COUNT, 100);
    conf.setInt(ScmConfigKeys.OZONE_DATANODE_PIPELINE_LIMIT, 1);
    conf.setQuietMode(false);
    conf.setTimeDuration("hdds.scm.replication.event.timeout", 2,
        TimeUnit.SECONDS);
    conf.setTimeDuration("hdds.scm.replication.event.timeout.datanode.offset",
        0, TimeUnit.MILLISECONDS);
    conf.setInt("hdds.datanode.block.delete.threads.max", 5);
    conf.setInt("hdds.datanode.block.delete.queue.limit", 32);
    // Effectively disable the periodic ReplicationManager run; tests drive it
    // explicitly via replicationManager.processAll().
    ReplicationManager.ReplicationManagerConfiguration replicationConf = conf
        .getObject(ReplicationManager.ReplicationManagerConfiguration.class);
    replicationConf.setInterval(Duration.ofSeconds(300));
    conf.setFromObject(replicationConf);
    cluster = MiniOzoneCluster.newBuilder(conf)
        .setNumDatanodes(3)
        .build();
    cluster.waitForClusterToBeReady();
    client = cluster.newClient();
    store = client.getObjectStore();
    om = cluster.getOzoneManager();
    writeClient = store
        .getClientProxy().getOzoneManagerClient();
    scm = cluster.getStorageContainerManager();
    containerIdsWithDeletedBlocks = new HashSet<>();
    metrics = scm.getScmBlockManager().getSCMBlockDeletingService()
        .getMetrics();
  }

  /** Closes the client and shuts down the MiniOzoneCluster. */
  @AfterEach
  public void cleanup() throws IOException {
    IOUtils.closeQuietly(client);
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  /** Replication configs exercised by the parameterized deletion test. */
  private static Stream<ReplicationConfig> replicationConfigs() {
    return Stream.of(
        ReplicationConfig.fromTypeAndFactor(
            ReplicationType.RATIS, ReplicationFactor.THREE),
        new ECReplicationConfig("rs-2-1-256k"));
  }

  /**
   * End-to-end block deletion flow: write a key, delete it, verify blocks
   * are removed from datanodes only after the containers close, and verify
   * SCM/DN delete-transaction ids and deletion metrics stay consistent,
   * including across a datanode restart.
   */
  @ParameterizedTest
  @MethodSource("replicationConfigs")
  @Flaky("HDDS-9962")
  public void testBlockDeletion(ReplicationConfig repConfig) throws Exception {
    String volumeName = UUID.randomUUID().toString();
    String bucketName = UUID.randomUUID().toString();
    LogCapturer logCapturer =
        LogCapturer.captureLogs(DeleteBlocksCommandHandler.class);
    String value = RandomStringUtils.secure().next(1024 * 1024);
    store.createVolume(volumeName);
    OzoneVolume volume = store.getVolume(volumeName);
    volume.createBucket(bucketName);
    OzoneBucket bucket = volume.getBucket(bucketName);
    String keyName = UUID.randomUUID().toString();
    OzoneOutputStream out = bucket.createKey(keyName,
        value.getBytes(UTF_8).length, repConfig, new HashMap<>());
    // Write the 1MB payload 10 times so the key spans multiple blocks.
    for (int i = 0; i < 10; i++) {
      out.write(value.getBytes(UTF_8));
    }
    out.close();
    OmKeyArgs keyArgs = new OmKeyArgs.Builder().setVolumeName(volumeName)
        .setBucketName(bucketName).setKeyName(keyName).setDataSize(0)
        .setReplicationConfig(repConfig)
        .build();
    List<OmKeyLocationInfoGroup> omKeyLocationInfoGroupList =
        om.lookupKey(keyArgs).getKeyLocationVersions();

    // Verify key blocks were created in the DNs.
    GenericTestUtils.waitFor(() -> {
      try {
        scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
        verifyBlocksCreated(omKeyLocationInfoGroupList);
        return true;
      } catch (Throwable t) {
        LOG.warn("Verify blocks creation failed", t);
        return false;
      }
    }, 1000, 10000);
    // No containers with deleted blocks yet.
    assertThat(containerIdsWithDeletedBlocks).isEmpty();
    // Delete transactionIds for the containers should be 0.
    // NOTE: this test assumes that every container is a KeyValueContainer. If
    // other container types are added, this test should be re-checked.
    matchContainerTransactionIds();
    assertEquals(0L, metrics.getNumBlockDeletionTransactionCreated());
    writeClient.deleteKey(keyArgs);
    Thread.sleep(5000);
    // The blocks should NOT yet be deleted in the DN because the containers
    // are still open; verifyBlocksDeleted() is therefore expected to fail.
    Throwable e = assertThrows(AssertionError.class,
        () -> verifyBlocksDeleted(omKeyLocationInfoGroupList));
    assertTrue(
        e.getMessage().startsWith("expected: <null> but was:"));
    assertEquals(0L, metrics.getNumBlockDeletionTransactionsOnDatanodes());
    // Close the containers which hold the blocks for the key.
    OzoneTestUtils.closeAllContainers(scm.getEventQueue(), scm);
    // If any container is present as not closed, i.e. matches some entry
    // not closed, then return false for the wait.
    ContainerSet containerSet = cluster.getHddsDatanodes().get(0)
        .getDatanodeStateMachine().getContainer().getContainerSet();
    GenericTestUtils.waitFor(() -> {
      return !(omKeyLocationInfoGroupList.stream().anyMatch((group) ->
          group.getLocationList().stream().anyMatch((info) ->
              containerSet.getContainer(info.getContainerID())
                  .getContainerData()
                  .getState()
                  != ContainerProtos.ContainerDataProto.State.CLOSED
          )
      ));
    }, 1000, 30000);
    // Now the blocks should be deleted in the DN.
    GenericTestUtils.waitFor(() -> {
      try {
        scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
        verifyBlocksDeleted(omKeyLocationInfoGroupList);
        return true;
      } catch (Throwable t) {
        LOG.warn("Verify blocks deletion failed", t);
        return false;
      }
    }, 2000, 30000);
    // A few containers now carry deleted blocks.
    assertThat(containerIdsWithDeletedBlocks).isNotEmpty();
    // Containers in the DN and SCM should have the same delete transactionIds.
    matchContainerTransactionIds();
    // Verify the delete transactions were committed (purged from SCM's table).
    GenericTestUtils.waitFor(() -> {
      try {
        scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
        verifyTransactionsCommitted();
        return true;
      } catch (Throwable t) {
        LOG.warn("Container closing failed", t);
        return false;
      }
    }, 500, 10000);

    // Containers in the DN and SCM should have same delete transactionIds
    // after DN restart. The assertion is just to verify that the state of
    // containerInfos in dn and scm is consistent after dn restart.
    cluster.restartHddsDatanode(0, true);
    matchContainerTransactionIds();

    // Deletion metrics must be internally consistent.
    assertEquals(metrics.getNumBlockDeletionTransactionCreated(),
        metrics.getNumBlockDeletionTransactionCompleted());
    assertEquals(metrics.getNumBlockDeletionCommandSent(),
        metrics.getNumCommandsDatanodeSent());
    assertEquals(metrics.getNumBlockDeletionCommandSuccess(),
        metrics.getNumCommandsDatanodeSuccess());
    assertEquals(metrics.getBNumBlockDeletionCommandFailure(),
        metrics.getNumCommandsDatanodeFailed());
    assertThat(metrics.getNumBlockDeletionCommandSent())
        .isGreaterThanOrEqualTo(metrics.getNumBlockDeletionCommandSuccess()
            + metrics.getBNumBlockDeletionCommandFailure());
    assertThat(metrics.getNumBlockDeletionTransactionsOnDatanodes())
        .isGreaterThanOrEqualTo(
            metrics.getNumBlockDeletionTransactionFailureOnDatanodes()
                + metrics.getNumBlockDeletionTransactionSuccessOnDatanodes());
    LOG.info(metrics.toString());

    // Datanode should receive retried requests with continuous retry counts.
    // NOTE(review): the inner loop asserts the same substring "1(" + i + ")"
    // for every j; presumably it was meant to check "1(" + j + ")" — confirm.
    for (int i = 5; i >= 0; i--) {
      if (logCapturer.getOutput().contains("1(" + i + ")")) {
        for (int j = 0; j <= i; j++) {
          assertThat(logCapturer.getOutput())
              .contains("1(" + i + ")");
        }
        break;
      }
    }
  }

  /**
   * Verifies SCM container statistics after key deletion: used bytes and key
   * counts drop to zero, pending deletes drain on the DNs, and the containers
   * eventually transition DELETING -> DELETED in SCM (in memory and in the
   * SCM DB), even with one datanode down and restarted mid-way.
   */
  @Test
  public void testContainerStatisticsAfterDelete() throws Exception {
    ReplicationManager replicationManager = scm.getReplicationManager();
    String volumeName = UUID.randomUUID().toString();
    String bucketName = UUID.randomUUID().toString();
    String value = RandomStringUtils.secure().next(1024 * 1024);
    store.createVolume(volumeName);
    OzoneVolume volume = store.getVolume(volumeName);
    volume.createBucket(bucketName);
    OzoneBucket bucket = volume.getBucket(bucketName);
    String keyName = UUID.randomUUID().toString();
    OzoneOutputStream out = bucket.createKey(keyName,
        value.getBytes(UTF_8).length, ReplicationType.RATIS,
        ReplicationFactor.THREE, new HashMap<>());
    out.write(value.getBytes(UTF_8));
    out.close();
    OmKeyArgs keyArgs = new OmKeyArgs.Builder().setVolumeName(volumeName)
        .setBucketName(bucketName).setKeyName(keyName).setDataSize(0)
        .setReplicationConfig(
            RatisReplicationConfig
                .getInstance(HddsProtos.ReplicationFactor.THREE))
        .build();
    Thread.sleep(5000);
    List<ContainerInfo> containerInfos =
        scm.getContainerManager().getContainers();
    final int valueSize = value.getBytes(UTF_8).length;
    final int keyCount = 1;
    containerInfos.stream().forEach(container -> {
      assertEquals(valueSize, container.getUsedBytes());
      assertEquals(keyCount, container.getNumberOfKeys());
    });
    OzoneTestUtils.closeAllContainers(scm.getEventQueue(), scm);
    // Wait for containers to close.
    Thread.sleep(2000);
    writeClient.deleteKey(keyArgs);
    // Wait for blocks to be deleted and container reports to be processed.
    GenericTestUtils.waitFor(() -> {
      try {
        scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      return scm.getContainerManager().getContainers().stream()
          .allMatch(c -> c.getUsedBytes() == 0 &&
              c.getNumberOfKeys() == 0);
    }, 500, 20000);
    Thread.sleep(5000);
    // Verify that pending block delete counts are as expected with resent cmds.
    cluster.getHddsDatanodes().forEach(dn -> {
      Map<Long, Container<?>> containerMap = dn.getDatanodeStateMachine()
          .getContainer().getContainerSet().getContainerMap();
      containerMap.values().forEach(container -> {
        KeyValueContainerData containerData =
            (KeyValueContainerData)container.getContainerData();
        assertEquals(0, containerData.getNumPendingDeletionBlocks());
      });
    });

    LogCapturer logCapturer =
        LogCapturer.captureLogs(ReplicationManager.class);
    logCapturer.clearOutput();
    // Take one DN down, then drive ReplicationManager manually: empty
    // containers should be moved to DELETING.
    cluster.shutdownHddsDatanode(0);
    replicationManager.processAll();
    ((EventQueue)scm.getEventQueue()).processAll(1000);
    containerInfos = scm.getContainerManager().getContainers();
    containerInfos.stream().forEach(container ->
        assertEquals(HddsProtos.LifeCycleState.DELETING,
            container.getState()));
    Thread.sleep(5000);
    replicationManager.processAll();
    ((EventQueue) scm.getEventQueue()).processAll(1000);
    String expectedOutput = "Sending delete command for container";
    GenericTestUtils.waitFor(() -> logCapturer.getOutput()
        .contains(expectedOutput), 500, 5000);

    cluster.restartHddsDatanode(0, true);
    Thread.sleep(2000);
    // Containers must end up DELETED both in SCM memory and in the SCM DB.
    GenericTestUtils.waitFor(() -> {
      replicationManager.processAll();
      ((EventQueue)scm.getEventQueue()).processAll(1000);
      List<ContainerInfo> infos = scm.getContainerManager().getContainers();
      try {
        infos.stream().forEach(container -> {
          assertEquals(HddsProtos.LifeCycleState.DELETED,
              container.getState());
          try {
            scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
            assertEquals(HddsProtos.LifeCycleState.DELETED,
                scm.getScmMetadataStore().getContainerTable()
                    .get(container.containerID()).getState());
          } catch (IOException e) {
            fail(
                "Container from SCM DB should be marked as DELETED");
          }
        });
      } catch (Throwable e) {
        LOG.info(e.getMessage());
        return false;
      }
      return true;
    }, 500, 15000);
    LOG.info(metrics.toString());
  }

  /**
   * Verifies a container's isEmpty state survives datanode restarts: it is
   * non-empty before and after a restart while the key exists, and empty
   * before and after a restart once the key is deleted; finally the container
   * must reach DELETED in SCM memory and the SCM DB.
   */
  @Test
  public void testContainerStateAfterDNRestart() throws Exception {
    ReplicationManager replicationManager = scm.getReplicationManager();
    String volumeName = UUID.randomUUID().toString();
    String bucketName = UUID.randomUUID().toString();
    String value = RandomStringUtils.secure().next(10 * 10);
    store.createVolume(volumeName);
    OzoneVolume volume = store.getVolume(volumeName);
    volume.createBucket(bucketName);
    OzoneBucket bucket = volume.getBucket(bucketName);
    String keyName = UUID.randomUUID().toString();
    OzoneOutputStream out = bucket.createKey(keyName,
        value.getBytes(UTF_8).length, ReplicationType.RATIS,
        ReplicationFactor.THREE, new HashMap<>());
    out.write(value.getBytes(UTF_8));
    out.close();
    OmKeyArgs keyArgs = new OmKeyArgs.Builder().setVolumeName(volumeName)
        .setBucketName(bucketName).setKeyName(keyName).setDataSize(0)
        .setReplicationConfig(
            RatisReplicationConfig
                .getInstance(HddsProtos.ReplicationFactor.THREE))
        .build();
    List<OmKeyLocationInfoGroup> omKeyLocationInfoGroupList =
        om.lookupKey(keyArgs).getKeyLocationVersions();
    Thread.sleep(5000);
    List<ContainerInfo> containerInfos =
        scm.getContainerManager().getContainers();
    final int valueSize = value.getBytes(UTF_8).length;
    final int keyCount = 1;
    List<Long> containerIdList = new ArrayList<>();
    containerInfos.stream().forEach(container -> {
      assertEquals(valueSize, container.getUsedBytes());
      assertEquals(keyCount, container.getNumberOfKeys());
      containerIdList.add(container.getContainerID());
    });
    OzoneTestUtils.closeAllContainers(scm.getEventQueue(), scm);
    // Wait for containers to close.
    TestHelper.waitForContainerClose(cluster,
        containerIdList.toArray(new Long[0]));
    // Make sure the containers are marked closed on the DN as well.
    omKeyLocationInfoGroupList.forEach((group) -> {
      List<OmKeyLocationInfo> locationInfo = group.getLocationList();
      locationInfo.forEach(
          (info) -> cluster.getHddsDatanodes().get(0).getDatanodeStateMachine()
              .getContainer().getContainerSet()
              .getContainer(info.getContainerID()).getContainerData()
              .setState(ContainerProtos.ContainerDataProto.State.CLOSED));
    });
    ContainerID containerId = ContainerID.valueOf(
        containerInfos.get(0).getContainerID());
    // Before restart, container state is non-empty.
    assertFalse(getContainerFromDN(
        cluster.getHddsDatanodes().get(0), containerId.getId())
        .getContainerData().isEmpty());

    // Restart DataNode.
    cluster.restartHddsDatanode(0, true);

    // After restart the container state remains non-empty.
    assertFalse(getContainerFromDN(
        cluster.getHddsDatanodes().get(0), containerId.getId())
        .getContainerData().isEmpty());

    // Delete the key, then wait for all SCM-side replicas to report empty.
    writeClient.deleteKey(keyArgs);
    Thread.sleep(10000);
    GenericTestUtils.waitFor(() -> {
      try {
        scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
        return scm.getContainerManager().getContainerReplicas(
            containerId).stream().
            allMatch(replica -> replica.isEmpty());
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }, 100, 10 * 1000);

    // Container state should be empty now as the key got deleted.
    assertTrue(getContainerFromDN(
        cluster.getHddsDatanodes().get(0), containerId.getId())
        .getContainerData().isEmpty());

    // Restart DataNode.
    cluster.restartHddsDatanode(0, true);

    // Container state should be empty even after restart.
    assertTrue(getContainerFromDN(
        cluster.getHddsDatanodes().get(0), containerId.getId())
        .getContainerData().isEmpty());

    // Drive ReplicationManager until the container is DELETED in SCM memory
    // and in the SCM DB.
    GenericTestUtils.waitFor(() -> {
      replicationManager.processAll();
      ((EventQueue)scm.getEventQueue()).processAll(1000);
      List<ContainerInfo> infos = scm.getContainerManager().getContainers();
      try {
        infos.stream().forEach(container -> {
          assertEquals(HddsProtos.LifeCycleState.DELETED,
              container.getState());
          try {
            scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
            assertEquals(HddsProtos.LifeCycleState.DELETED,
                scm.getScmMetadataStore().getContainerTable()
                    .get(container.containerID()).getState());
          } catch (IOException e) {
            fail(
                "Container from SCM DB should be marked as DELETED");
          }
        });
      } catch (Throwable e) {
        LOG.info(e.getMessage());
        return false;
      }
      return true;
    }, 500, 30000);
  }

  /**
   * Return the container for the given containerID from the given DN.
   */
  private Container getContainerFromDN(HddsDatanodeService hddsDatanodeService,
                                       long containerID) {
    return hddsDatanodeService.getDatanodeStateMachine().getContainer()
        .getContainerSet().getContainer(containerID);
  }

  /**
   * Verifies that SCM still deletes a container whose replicas report
   * consistent emptiness even when one replica carries an invalid (non-zero)
   * keyCount, by injecting a forged replica record into the
   * ContainerStateManager.
   */
  @Test
  public void testContainerDeleteWithInvalidKeyCount()
      throws Exception {
    ReplicationManager replicationManager = scm.getReplicationManager();
    String volumeName = UUID.randomUUID().toString();
    String bucketName = UUID.randomUUID().toString();
    String value = RandomStringUtils.secure().next(1024 * 1024);
    store.createVolume(volumeName);
    OzoneVolume volume = store.getVolume(volumeName);
    volume.createBucket(bucketName);
    OzoneBucket bucket = volume.getBucket(bucketName);
    String keyName = UUID.randomUUID().toString();
    OzoneOutputStream out = bucket.createKey(keyName,
        value.getBytes(UTF_8).length, ReplicationType.RATIS,
        ReplicationFactor.THREE, new HashMap<>());
    out.write(value.getBytes(UTF_8));
    out.close();
    OmKeyArgs keyArgs = new OmKeyArgs.Builder().setVolumeName(volumeName)
        .setBucketName(bucketName).setKeyName(keyName).setDataSize(0)
        .setReplicationConfig(
            RatisReplicationConfig
                .getInstance(HddsProtos.ReplicationFactor.THREE))
        .build();
    List<OmKeyLocationInfoGroup> omKeyLocationInfoGroupList =
        om.lookupKey(keyArgs).getKeyLocationVersions();
    Thread.sleep(5000);
    List<ContainerInfo> containerInfos =
        scm.getContainerManager().getContainers();
    final int valueSize = value.getBytes(UTF_8).length;
    final int keyCount = 1;
    List<Long> containerIdList = new ArrayList<>();
    containerInfos.forEach(container -> {
      assertEquals(valueSize, container.getUsedBytes());
      assertEquals(keyCount, container.getNumberOfKeys());
      containerIdList.add(container.getContainerID());
    });
    OzoneTestUtils.closeAllContainers(scm.getEventQueue(), scm);
    // Wait for containers to close.
    TestHelper.waitForContainerClose(cluster,
        containerIdList.toArray(new Long[0]));
    // Make sure the containers are marked closed on the DN as well.
    omKeyLocationInfoGroupList.forEach((group) -> {
      List<OmKeyLocationInfo> locationInfo = group.getLocationList();
      locationInfo.forEach(
          (info) -> cluster.getHddsDatanodes().get(0).getDatanodeStateMachine()
              .getContainer().getContainerSet()
              .getContainer(info.getContainerID()).getContainerData()
              .setState(ContainerProtos.ContainerDataProto.State.CLOSED));
    });
    ContainerStateManager containerStateManager = scm.getContainerManager()
        .getContainerStateManager();
    ContainerID containerId = ContainerID.valueOf(
        containerInfos.get(0).getContainerID());
    // Get all the replicas' state from SCM.
    Set<ContainerReplica> replicas =
        scm.getContainerManager().getContainerReplicas(containerId);
    // Ensure isEmpty is false for every replica in SCM before the delete.
    assertTrue(scm.getContainerManager().getContainerReplicas(
        containerId).stream().
        allMatch(replica -> !replica.isEmpty()));

    // Delete the key.
    writeClient.deleteKey(keyArgs);
    Thread.sleep(5000);

    // Ensure isEmpty becomes true for all replicas after the key delete.
    GenericTestUtils.waitFor(() -> {
      try {
        scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
        return scm.getContainerManager().getContainerReplicas(
            containerId).stream()
            .allMatch(replica -> replica.isEmpty());
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }, 500, 5 * 2000);

    // Forge a replica record with an invalid keyCount (10) but isEmpty=true.
    ContainerReplica replicaOne = ContainerReplica.newBuilder()
        .setContainerID(containerId)
        .setKeyCount(10)
        .setContainerState(StorageContainerDatanodeProtocolProtos
            .ContainerReplicaProto.State.CLOSED)
        .setDatanodeDetails(replicas.iterator().next().getDatanodeDetails())
        .setEmpty(true)
        .build();
    // Inject the forged replica.
    containerStateManager.updateContainerReplica(replicaOne);
    // Check replica updated with wrong keyCount.
    // NOTE(review): the anyMatch result is discarded — nothing is asserted
    // here; presumably this was meant to be wrapped in assertTrue. Confirm.
    scm.getContainerManager().getContainerReplicas(
        ContainerID.valueOf(containerInfos.get(0).getContainerID()))
        .stream().anyMatch(replica -> replica.getKeyCount() == 10);

    // Process container deletion in SCM; containers must reach DELETED even
    // though keyCount is invalid in one of the replicas.
    GenericTestUtils.waitFor(() -> {
      replicationManager.processAll();
      ((EventQueue)scm.getEventQueue()).processAll(1000);
      List<ContainerInfo> infos = scm.getContainerManager().getContainers();
      try {
        infos.stream().forEach(container -> {
          assertEquals(HddsProtos.LifeCycleState.DELETED,
              container.getState());
          try {
            scm.getScmHAManager().asSCMHADBTransactionBuffer().flush();
            assertEquals(HddsProtos.LifeCycleState.DELETED,
                scm.getScmMetadataStore().getContainerTable()
                    .get(container.containerID()).getState());
          } catch (IOException e) {
            fail(
                "Container from SCM DB should be marked as DELETED");
          }
        });
      } catch (Throwable e) {
        LOG.info(e.getMessage());
        return false;
      }
      return true;
    }, 500, 30000);
  }

  /**
   * Asserts every delete transaction up to maxTransactionId has been purged
   * from SCM's deleted-blocks transaction table (i.e. committed).
   */
  private void verifyTransactionsCommitted() throws IOException {
    // NOTE(review): this call's result is discarded — it appears to be a
    // no-op left over from an earlier revision; confirm and remove.
    scm.getScmBlockManager().getDeletedBlockLog();
    for (long txnID = 1; txnID <= maxTransactionId; txnID++) {
      assertNull(
          scm.getScmMetadataStore().getDeletedBlocksTXTable().get(txnID));
    }
  }

  /**
   * Asserts that every container's delete-transaction id agrees between the
   * datanode's local container data and SCM's container info, and records the
   * max transaction id seen (consumed by verifyTransactionsCommitted()).
   */
  private void matchContainerTransactionIds() throws IOException {
    for (HddsDatanodeService datanode : cluster.getHddsDatanodes()) {
      ContainerSet dnContainerSet =
          datanode.getDatanodeStateMachine().getContainer().getContainerSet();
      List<ContainerData> containerDataList = new ArrayList<>();
      dnContainerSet.listContainer(0, 10000, containerDataList);
      for (ContainerData containerData : containerDataList) {
        long containerId = containerData.getContainerID();
        if (containerIdsWithDeletedBlocks.contains(containerId)) {
          assertThat(scm.getContainerInfo(containerId).getDeleteTransactionId())
              .isGreaterThan(0);
          maxTransactionId = max(maxTransactionId,
              scm.getContainerInfo(containerId).getDeleteTransactionId());
        } else {
          // NOTE(review): JUnit's assertEquals convention is
          // (expected, actual); the arguments here (and below) are reversed,
          // which only affects the failure message, not correctness.
          assertEquals(
              scm.getContainerInfo(containerId).getDeleteTransactionId(), 0);
        }
        assertEquals(
            ((KeyValueContainerData) dnContainerSet.getContainer(containerId)
                .getContainerData()).getDeleteTransactionId(),
            scm.getContainerInfo(containerId).getDeleteTransactionId());
      }
    }
  }

  /**
   * Asserts that on every datanode each block of the given key locations has
   * an entry in its container's block-data table (RocksDB).
   */
  private void verifyBlocksCreated(
      List<OmKeyLocationInfoGroup> omKeyLocationInfoGroups) throws Exception {
    for (HddsDatanodeService datanode : cluster.getHddsDatanodes()) {
      ContainerSet dnContainerSet =
          datanode.getDatanodeStateMachine().getContainer().getContainerSet();
      OzoneTestUtils.performOperationOnKeyContainers((blockID) -> {
        KeyValueContainerData cData = (KeyValueContainerData) dnContainerSet
            .getContainer(blockID.getContainerID()).getContainerData();
        try (DBHandle db = BlockUtils.getDB(cData, conf)) {
          assertNotNull(db.getStore().getBlockDataTable()
              .get(cData.getBlockKey(blockID.getLocalID())));
        }
      }, omKeyLocationInfoGroups);
    }
  }

  /**
   * Asserts that on every datanode the blocks of the given key locations are
   * gone from both the block-data table and the "deleting" staging keys, and
   * records each affected container id in containerIdsWithDeletedBlocks.
   */
  private void verifyBlocksDeleted(
      List<OmKeyLocationInfoGroup> omKeyLocationInfoGroups) throws Exception {
    for (HddsDatanodeService datanode : cluster.getHddsDatanodes()) {
      ContainerSet dnContainerSet =
          datanode.getDatanodeStateMachine().getContainer().getContainerSet();
      OzoneTestUtils.performOperationOnKeyContainers((blockID) -> {
        KeyValueContainerData cData = (KeyValueContainerData) dnContainerSet
            .getContainer(blockID.getContainerID()).getContainerData();
        try (DBHandle db = BlockUtils.getDB(cData, conf)) {
          Table<String, BlockData> blockDataTable =
              db.getStore().getBlockDataTable();
          String blockKey = cData.getBlockKey(blockID.getLocalID());
          BlockData blockData = blockDataTable.get(blockKey);
          assertNull(blockData);
          String deletingKey = cData.getDeletingBlockKey(
              blockID.getLocalID());
          assertNull(blockDataTable.get(deletingKey));
        }
        containerIdsWithDeletedBlocks.add(blockID.getContainerID());
      }, omKeyLocationInfoGroups);
    }
  }

  /**
   * Deletes 10 keys at once and waits until SCM's deleted-block log drains to
   * zero valid transactions, exercising the parallel (multi-threaded) delete
   * command processing configured in init() (5 threads, queue limit 32).
   */
  @Test
  public void testBlockDeleteCommandParallelProcess() throws Exception {
    String volumeName = UUID.randomUUID().toString();
    String bucketName = UUID.randomUUID().toString();
    String value = RandomStringUtils.secure().next(64 * 1024);
    store.createVolume(volumeName);
    OzoneVolume volume = store.getVolume(volumeName);
    volume.createBucket(bucketName);
    OzoneBucket bucket = volume.getBucket(bucketName);
    int keyCount = 10;
    List<String> keys = new ArrayList<>();
    for (int j = 0; j < keyCount; j++) {
      String keyName = UUID.randomUUID().toString();
      OzoneOutputStream out = bucket.createKey(keyName,
          value.getBytes(UTF_8).length, ReplicationType.RATIS,
          ReplicationFactor.THREE, new HashMap<>());
      out.write(value.getBytes(UTF_8));
      out.close();
      keys.add(keyName);
    }

    // Close the containers which hold the blocks for the keys.
    OzoneTestUtils.closeAllContainers(scm.getEventQueue(), scm);
    Thread.sleep(2000);

    for (int j = 0; j < keyCount; j++) {
      OmKeyArgs keyArgs = new OmKeyArgs.Builder().setVolumeName(volumeName)
          .setBucketName(bucketName).setKeyName(keys.get(j)).setDataSize(0)
          .setReplicationConfig(
              RatisReplicationConfig
                  .getInstance(HddsProtos.ReplicationFactor.THREE))
          .build();
      writeClient.deleteKey(keyArgs);
    }

    // Wait for the block delete commands sent from OM.
    OzoneTestUtils.flushAndWaitForDeletedBlockLog(scm);

    long start = Time.monotonicNow();
    // Wait until all blocks have been deleted.
    GenericTestUtils.waitFor(() -> {
      try {
        if (scm.getScmBlockManager().getDeletedBlockLog()
            .getNumOfValidTransactions() == 0) {
          return true;
        }
      } catch (IOException e) {
        // NOTE(review): exception deliberately swallowed — the waitFor loop
        // simply retries; consider at least logging it at debug level.
      }
      return false;
    }, 100, 30000);
    long end = Time.monotonicNow();
    System.out.println("Block deletion costs " + (end - start) + "ms");
  }
}
googleapis/google-api-java-client-services
35,168
clients/google-api-services-reseller/v1/1.26.0/com/google/api/services/reseller/model/Subscription.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.reseller.model; /** * JSON template for a subscription. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Enterprise Apps Reseller API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Subscription extends com.google.api.client.json.GenericJson { /** * Read-only field that returns the current billing method for a subscription. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String billingMethod; /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long creationTime; /** * Primary domain name of the customer * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String customerDomain; /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String customerId; /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dealCode; /** * Identifies the resource as a Subscription. Value: reseller#subscription * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private Plan plan; /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String purchaseOrderId; /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * The value may be {@code null}. */ @com.google.api.client.util.Key private RenewalSettings renewalSettings; /** * URL to customer's Subscriptions page in the Admin console. 
The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String resourceUiUrl; /** * This is a required property. The number and limit of user seat licenses in the plan. * The value may be {@code null}. */ @com.google.api.client.util.Key private Seats seats; /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuId; /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuName; /** * This is an optional property. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String status; /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String subscriptionId; /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed. 
* * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> suspensionReasons; /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * The value may be {@code null}. */ @com.google.api.client.util.Key private TransferInfo transferInfo; /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private TrialSettings trialSettings; /** * Read-only field that returns the current billing method for a subscription. * @return value or {@code null} for none */ public java.lang.String getBillingMethod() { return billingMethod; } /** * Read-only field that returns the current billing method for a subscription. * @param billingMethod billingMethod or {@code null} for none */ public Subscription setBillingMethod(java.lang.String billingMethod) { this.billingMethod = billingMethod; return this; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getCreationTime() { return creationTime; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. 
See an example Epoch converter. * @param creationTime creationTime or {@code null} for none */ public Subscription setCreationTime(java.lang.Long creationTime) { this.creationTime = creationTime; return this; } /** * Primary domain name of the customer * @return value or {@code null} for none */ public java.lang.String getCustomerDomain() { return customerDomain; } /** * Primary domain name of the customer * @param customerDomain customerDomain or {@code null} for none */ public Subscription setCustomerDomain(java.lang.String customerDomain) { this.customerDomain = customerDomain; return this; } /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * @return value or {@code null} for none */ public java.lang.String getCustomerId() { return customerId; } /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * @param customerId customerId or {@code null} for none */ public Subscription setCustomerId(java.lang.String customerId) { this.customerId = customerId; return this; } /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. * @return value or {@code null} for none */ public java.lang.String getDealCode() { return dealCode; } /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. 
* @param dealCode dealCode or {@code null} for none */ public Subscription setDealCode(java.lang.String dealCode) { this.dealCode = dealCode; return this; } /** * Identifies the resource as a Subscription. Value: reseller#subscription * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * Identifies the resource as a Subscription. Value: reseller#subscription * @param kind kind or {@code null} for none */ public Subscription setKind(java.lang.String kind) { this.kind = kind; return this; } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * @return value or {@code null} for none */ public Plan getPlan() { return plan; } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * @param plan plan or {@code null} for none */ public Subscription setPlan(Plan plan) { this.plan = plan; return this; } /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. * @return value or {@code null} for none */ public java.lang.String getPurchaseOrderId() { return purchaseOrderId; } /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. 
* @param purchaseOrderId purchaseOrderId or {@code null} for none */ public Subscription setPurchaseOrderId(java.lang.String purchaseOrderId) { this.purchaseOrderId = purchaseOrderId; return this; } /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * @return value or {@code null} for none */ public RenewalSettings getRenewalSettings() { return renewalSettings; } /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * @param renewalSettings renewalSettings or {@code null} for none */ public Subscription setRenewalSettings(RenewalSettings renewalSettings) { this.renewalSettings = renewalSettings; return this; } /** * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * @return value or {@code null} for none */ public java.lang.String getResourceUiUrl() { return resourceUiUrl; } /** * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * @param resourceUiUrl resourceUiUrl or {@code null} for none */ public Subscription setResourceUiUrl(java.lang.String resourceUiUrl) { this.resourceUiUrl = resourceUiUrl; return this; } /** * This is a required property. The number and limit of user seat licenses in the plan. * @return value or {@code null} for none */ public Seats getSeats() { return seats; } /** * This is a required property. The number and limit of user seat licenses in the plan. 
* @param seats seats or {@code null} for none */ public Subscription setSeats(Seats seats) { this.seats = seats; return this; } /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * @return value or {@code null} for none */ public java.lang.String getSkuId() { return skuId; } /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * @param skuId skuId or {@code null} for none */ public Subscription setSkuId(java.lang.String skuId) { this.skuId = skuId; return this; } /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * @return value or {@code null} for none */ public java.lang.String getSkuName() { return skuName; } /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * @param skuName skuName or {@code null} for none */ public Subscription setSkuName(java.lang.String skuName) { this.skuName = skuName; return this; } /** * This is an optional property. * @return value or {@code null} for none */ public java.lang.String getStatus() { return status; } /** * This is an optional property. * @param status status or {@code null} for none */ public Subscription setStatus(java.lang.String status) { this.status = status; return this; } /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. 
Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * @return value or {@code null} for none */ public java.lang.String getSubscriptionId() { return subscriptionId; } /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * @param subscriptionId subscriptionId or {@code null} for none */ public Subscription setSubscriptionId(java.lang.String subscriptionId) { this.subscriptionId = subscriptionId; return this; } /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed. * * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * @return value or {@code null} for none */ public java.util.List<java.lang.String> getSuspensionReasons() { return suspensionReasons; } /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. 
A subscription's STATUS is SUSPENDED until all pending suspensions are removed. * * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * @param suspensionReasons suspensionReasons or {@code null} for none */ public Subscription setSuspensionReasons(java.util.List<java.lang.String> suspensionReasons) { this.suspensionReasons = suspensionReasons; return this; } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * @return value or {@code null} for none */ public TransferInfo getTransferInfo() { return transferInfo; } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * @param transferInfo transferInfo or {@code null} for none */ public Subscription setTransferInfo(TransferInfo transferInfo) { this.transferInfo = transferInfo; return this; } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. * @return value or {@code null} for none */ public TrialSettings getTrialSettings() { return trialSettings; } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. 
* @param trialSettings trialSettings or {@code null} for none */ public Subscription setTrialSettings(TrialSettings trialSettings) { this.trialSettings = trialSettings; return this; } @Override public Subscription set(String fieldName, Object value) { return (Subscription) super.set(fieldName, value); } @Override public Subscription clone() { return (Subscription) super.clone(); } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. */ public static final class Plan extends com.google.api.client.json.GenericJson { /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * The value may be {@code null}. */ @com.google.api.client.util.Key private CommitmentInterval commitmentInterval; /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean isCommitmentPlan; /** * The planName property is required. This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. 
Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String planName; /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * @return value or {@code null} for none */ public CommitmentInterval getCommitmentInterval() { return commitmentInterval; } /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * @param commitmentInterval commitmentInterval or {@code null} for none */ public Plan setCommitmentInterval(CommitmentInterval commitmentInterval) { this.commitmentInterval = commitmentInterval; return this; } /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * @return value or {@code null} for none */ public java.lang.Boolean getIsCommitmentPlan() { return isCommitmentPlan; } /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * @param isCommitmentPlan isCommitmentPlan or {@code null} for none */ public Plan setIsCommitmentPlan(java.lang.Boolean isCommitmentPlan) { this.isCommitmentPlan = isCommitmentPlan; return this; } /** * The planName property is required. 
This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. * @return value or {@code null} for none */ public java.lang.String getPlanName() { return planName; } /** * The planName property is required. This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. 
* @param planName planName or {@code null} for none */ public Plan setPlanName(java.lang.String planName) { this.planName = planName; return this; } @Override public Plan set(String fieldName, Object value) { return (Plan) super.set(fieldName, value); } @Override public Plan clone() { return (Plan) super.clone(); } /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. */ public static final class CommitmentInterval extends com.google.api.client.json.GenericJson { /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long endTime; /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long startTime; /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getEndTime() { return endTime; } /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * @param endTime endTime or {@code null} for none */ public CommitmentInterval setEndTime(java.lang.Long endTime) { this.endTime = endTime; return this; } /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. 
* @return value or {@code null} for none */ public java.lang.Long getStartTime() { return startTime; } /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. * @param startTime startTime or {@code null} for none */ public CommitmentInterval setStartTime(java.lang.Long startTime) { this.startTime = startTime; return this; } @Override public CommitmentInterval set(String fieldName, Object value) { return (CommitmentInterval) super.set(fieldName, value); } @Override public CommitmentInterval clone() { return (CommitmentInterval) super.clone(); } } } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. */ public static final class TransferInfo extends com.google.api.client.json.GenericJson { /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer minimumTransferableSeats; /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long transferabilityExpirationTime; /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. 
* @return value or {@code null} for none */ public java.lang.Integer getMinimumTransferableSeats() { return minimumTransferableSeats; } /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. * @param minimumTransferableSeats minimumTransferableSeats or {@code null} for none */ public TransferInfo setMinimumTransferableSeats(java.lang.Integer minimumTransferableSeats) { this.minimumTransferableSeats = minimumTransferableSeats; return this; } /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * @return value or {@code null} for none */ public java.lang.Long getTransferabilityExpirationTime() { return transferabilityExpirationTime; } /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * @param transferabilityExpirationTime transferabilityExpirationTime or {@code null} for none */ public TransferInfo setTransferabilityExpirationTime(java.lang.Long transferabilityExpirationTime) { this.transferabilityExpirationTime = transferabilityExpirationTime; return this; } @Override public TransferInfo set(String fieldName, Object value) { return (TransferInfo) super.set(fieldName, value); } @Override public TransferInfo clone() { return (TransferInfo) super.clone(); } } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For more * information, see the API concepts. */ public static final class TrialSettings extends com.google.api.client.json.GenericJson { /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.Boolean isInTrial; /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long trialEndTime; /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * @return value or {@code null} for none */ public java.lang.Boolean getIsInTrial() { return isInTrial; } /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * @param isInTrial isInTrial or {@code null} for none */ public TrialSettings setIsInTrial(java.lang.Boolean isInTrial) { this.isInTrial = isInTrial; return this; } /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getTrialEndTime() { return trialEndTime; } /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * @param trialEndTime trialEndTime or {@code null} for none */ public TrialSettings setTrialEndTime(java.lang.Long trialEndTime) { this.trialEndTime = trialEndTime; return this; } @Override public TrialSettings set(String fieldName, Object value) { return (TrialSettings) super.set(fieldName, value); } @Override public TrialSettings clone() { return (TrialSettings) super.clone(); } } }
/*
 * ---- extraction artifact: file boundary ----
 * The three rows below are dataset-join metadata accidentally fused into the
 * source text, preserved here (commented out) rather than deleted.
 *   repo_id:   googleapis/google-api-java-client-services
 *   size:      35,168
 *   file_path: clients/google-api-services-reseller/v1/1.27.0/com/google/api/services/reseller/model/Subscription.java
 * Everything after this comment is the beginning of a second, separately
 * generated source file (Subscription.java), not a continuation of the
 * class that closed above.
 */
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.reseller.model; /** * JSON template for a subscription. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Enterprise Apps Reseller API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Subscription extends com.google.api.client.json.GenericJson { /** * Read-only field that returns the current billing method for a subscription. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String billingMethod; /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long creationTime; /** * Primary domain name of the customer * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String customerDomain; /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String customerId; /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dealCode; /** * Identifies the resource as a Subscription. Value: reseller#subscription * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private Plan plan; /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String purchaseOrderId; /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * The value may be {@code null}. */ @com.google.api.client.util.Key private RenewalSettings renewalSettings; /** * URL to customer's Subscriptions page in the Admin console. 
The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String resourceUiUrl; /** * This is a required property. The number and limit of user seat licenses in the plan. * The value may be {@code null}. */ @com.google.api.client.util.Key private Seats seats; /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuId; /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuName; /** * This is an optional property. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String status; /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String subscriptionId; /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed. 
* * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> suspensionReasons; /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * The value may be {@code null}. */ @com.google.api.client.util.Key private TransferInfo transferInfo; /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private TrialSettings trialSettings; /** * Read-only field that returns the current billing method for a subscription. * @return value or {@code null} for none */ public java.lang.String getBillingMethod() { return billingMethod; } /** * Read-only field that returns the current billing method for a subscription. * @param billingMethod billingMethod or {@code null} for none */ public Subscription setBillingMethod(java.lang.String billingMethod) { this.billingMethod = billingMethod; return this; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getCreationTime() { return creationTime; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. 
See an example Epoch converter. * @param creationTime creationTime or {@code null} for none */ public Subscription setCreationTime(java.lang.Long creationTime) { this.creationTime = creationTime; return this; } /** * Primary domain name of the customer * @return value or {@code null} for none */ public java.lang.String getCustomerDomain() { return customerDomain; } /** * Primary domain name of the customer * @param customerDomain customerDomain or {@code null} for none */ public Subscription setCustomerDomain(java.lang.String customerDomain) { this.customerDomain = customerDomain; return this; } /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * @return value or {@code null} for none */ public java.lang.String getCustomerId() { return customerId; } /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * @param customerId customerId or {@code null} for none */ public Subscription setCustomerId(java.lang.String customerId) { this.customerId = customerId; return this; } /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. * @return value or {@code null} for none */ public java.lang.String getDealCode() { return dealCode; } /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. 
* @param dealCode dealCode or {@code null} for none */ public Subscription setDealCode(java.lang.String dealCode) { this.dealCode = dealCode; return this; } /** * Identifies the resource as a Subscription. Value: reseller#subscription * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * Identifies the resource as a Subscription. Value: reseller#subscription * @param kind kind or {@code null} for none */ public Subscription setKind(java.lang.String kind) { this.kind = kind; return this; } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * @return value or {@code null} for none */ public Plan getPlan() { return plan; } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * @param plan plan or {@code null} for none */ public Subscription setPlan(Plan plan) { this.plan = plan; return this; } /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. * @return value or {@code null} for none */ public java.lang.String getPurchaseOrderId() { return purchaseOrderId; } /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. 
* @param purchaseOrderId purchaseOrderId or {@code null} for none */ public Subscription setPurchaseOrderId(java.lang.String purchaseOrderId) { this.purchaseOrderId = purchaseOrderId; return this; } /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * @return value or {@code null} for none */ public RenewalSettings getRenewalSettings() { return renewalSettings; } /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * @param renewalSettings renewalSettings or {@code null} for none */ public Subscription setRenewalSettings(RenewalSettings renewalSettings) { this.renewalSettings = renewalSettings; return this; } /** * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * @return value or {@code null} for none */ public java.lang.String getResourceUiUrl() { return resourceUiUrl; } /** * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * @param resourceUiUrl resourceUiUrl or {@code null} for none */ public Subscription setResourceUiUrl(java.lang.String resourceUiUrl) { this.resourceUiUrl = resourceUiUrl; return this; } /** * This is a required property. The number and limit of user seat licenses in the plan. * @return value or {@code null} for none */ public Seats getSeats() { return seats; } /** * This is a required property. The number and limit of user seat licenses in the plan. 
* @param seats seats or {@code null} for none */ public Subscription setSeats(Seats seats) { this.seats = seats; return this; } /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * @return value or {@code null} for none */ public java.lang.String getSkuId() { return skuId; } /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * @param skuId skuId or {@code null} for none */ public Subscription setSkuId(java.lang.String skuId) { this.skuId = skuId; return this; } /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * @return value or {@code null} for none */ public java.lang.String getSkuName() { return skuName; } /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * @param skuName skuName or {@code null} for none */ public Subscription setSkuName(java.lang.String skuName) { this.skuName = skuName; return this; } /** * This is an optional property. * @return value or {@code null} for none */ public java.lang.String getStatus() { return status; } /** * This is an optional property. * @param status status or {@code null} for none */ public Subscription setStatus(java.lang.String status) { this.status = status; return this; } /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. 
Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * @return value or {@code null} for none */ public java.lang.String getSubscriptionId() { return subscriptionId; } /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * @param subscriptionId subscriptionId or {@code null} for none */ public Subscription setSubscriptionId(java.lang.String subscriptionId) { this.subscriptionId = subscriptionId; return this; } /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed. * * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * @return value or {@code null} for none */ public java.util.List<java.lang.String> getSuspensionReasons() { return suspensionReasons; } /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. 
A subscription's STATUS is SUSPENDED until all pending suspensions are removed. * * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * @param suspensionReasons suspensionReasons or {@code null} for none */ public Subscription setSuspensionReasons(java.util.List<java.lang.String> suspensionReasons) { this.suspensionReasons = suspensionReasons; return this; } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * @return value or {@code null} for none */ public TransferInfo getTransferInfo() { return transferInfo; } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * @param transferInfo transferInfo or {@code null} for none */ public Subscription setTransferInfo(TransferInfo transferInfo) { this.transferInfo = transferInfo; return this; } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. * @return value or {@code null} for none */ public TrialSettings getTrialSettings() { return trialSettings; } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. 
* @param trialSettings trialSettings or {@code null} for none */ public Subscription setTrialSettings(TrialSettings trialSettings) { this.trialSettings = trialSettings; return this; } @Override public Subscription set(String fieldName, Object value) { return (Subscription) super.set(fieldName, value); } @Override public Subscription clone() { return (Subscription) super.clone(); } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. */ public static final class Plan extends com.google.api.client.json.GenericJson { /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * The value may be {@code null}. */ @com.google.api.client.util.Key private CommitmentInterval commitmentInterval; /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean isCommitmentPlan; /** * The planName property is required. This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. 
Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String planName; /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * @return value or {@code null} for none */ public CommitmentInterval getCommitmentInterval() { return commitmentInterval; } /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * @param commitmentInterval commitmentInterval or {@code null} for none */ public Plan setCommitmentInterval(CommitmentInterval commitmentInterval) { this.commitmentInterval = commitmentInterval; return this; } /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * @return value or {@code null} for none */ public java.lang.Boolean getIsCommitmentPlan() { return isCommitmentPlan; } /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * @param isCommitmentPlan isCommitmentPlan or {@code null} for none */ public Plan setIsCommitmentPlan(java.lang.Boolean isCommitmentPlan) { this.isCommitmentPlan = isCommitmentPlan; return this; } /** * The planName property is required. 
This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. * @return value or {@code null} for none */ public java.lang.String getPlanName() { return planName; } /** * The planName property is required. This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. 
* @param planName planName or {@code null} for none */ public Plan setPlanName(java.lang.String planName) { this.planName = planName; return this; } @Override public Plan set(String fieldName, Object value) { return (Plan) super.set(fieldName, value); } @Override public Plan clone() { return (Plan) super.clone(); } /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. */ public static final class CommitmentInterval extends com.google.api.client.json.GenericJson { /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long endTime; /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long startTime; /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getEndTime() { return endTime; } /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * @param endTime endTime or {@code null} for none */ public CommitmentInterval setEndTime(java.lang.Long endTime) { this.endTime = endTime; return this; } /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. 
* @return value or {@code null} for none */ public java.lang.Long getStartTime() { return startTime; } /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. * @param startTime startTime or {@code null} for none */ public CommitmentInterval setStartTime(java.lang.Long startTime) { this.startTime = startTime; return this; } @Override public CommitmentInterval set(String fieldName, Object value) { return (CommitmentInterval) super.set(fieldName, value); } @Override public CommitmentInterval clone() { return (CommitmentInterval) super.clone(); } } } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. */ public static final class TransferInfo extends com.google.api.client.json.GenericJson { /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer minimumTransferableSeats; /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long transferabilityExpirationTime; /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. 
* @return value or {@code null} for none */ public java.lang.Integer getMinimumTransferableSeats() { return minimumTransferableSeats; } /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. * @param minimumTransferableSeats minimumTransferableSeats or {@code null} for none */ public TransferInfo setMinimumTransferableSeats(java.lang.Integer minimumTransferableSeats) { this.minimumTransferableSeats = minimumTransferableSeats; return this; } /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * @return value or {@code null} for none */ public java.lang.Long getTransferabilityExpirationTime() { return transferabilityExpirationTime; } /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * @param transferabilityExpirationTime transferabilityExpirationTime or {@code null} for none */ public TransferInfo setTransferabilityExpirationTime(java.lang.Long transferabilityExpirationTime) { this.transferabilityExpirationTime = transferabilityExpirationTime; return this; } @Override public TransferInfo set(String fieldName, Object value) { return (TransferInfo) super.set(fieldName, value); } @Override public TransferInfo clone() { return (TransferInfo) super.clone(); } } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For more * information, see the API concepts. */ public static final class TrialSettings extends com.google.api.client.json.GenericJson { /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.Boolean isInTrial; /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long trialEndTime; /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * @return value or {@code null} for none */ public java.lang.Boolean getIsInTrial() { return isInTrial; } /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * @param isInTrial isInTrial or {@code null} for none */ public TrialSettings setIsInTrial(java.lang.Boolean isInTrial) { this.isInTrial = isInTrial; return this; } /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getTrialEndTime() { return trialEndTime; } /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * @param trialEndTime trialEndTime or {@code null} for none */ public TrialSettings setTrialEndTime(java.lang.Long trialEndTime) { this.trialEndTime = trialEndTime; return this; } @Override public TrialSettings set(String fieldName, Object value) { return (TrialSettings) super.set(fieldName, value); } @Override public TrialSettings clone() { return (TrialSettings) super.clone(); } } }
googleapis/google-api-java-client-services
35,168
clients/google-api-services-reseller/v1/1.28.0/com/google/api/services/reseller/model/Subscription.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.reseller.model; /** * JSON template for a subscription. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Enterprise Apps Reseller API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Subscription extends com.google.api.client.json.GenericJson { /** * Read-only field that returns the current billing method for a subscription. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String billingMethod; /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long creationTime; /** * Primary domain name of the customer * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String customerDomain; /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String customerId; /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dealCode; /** * Identifies the resource as a Subscription. Value: reseller#subscription * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private Plan plan; /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String purchaseOrderId; /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * The value may be {@code null}. */ @com.google.api.client.util.Key private RenewalSettings renewalSettings; /** * URL to customer's Subscriptions page in the Admin console. 
The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String resourceUiUrl; /** * This is a required property. The number and limit of user seat licenses in the plan. * The value may be {@code null}. */ @com.google.api.client.util.Key private Seats seats; /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuId; /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuName; /** * This is an optional property. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String status; /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String subscriptionId; /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed. 
* * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> suspensionReasons; /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * The value may be {@code null}. */ @com.google.api.client.util.Key private TransferInfo transferInfo; /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private TrialSettings trialSettings; /** * Read-only field that returns the current billing method for a subscription. * @return value or {@code null} for none */ public java.lang.String getBillingMethod() { return billingMethod; } /** * Read-only field that returns the current billing method for a subscription. * @param billingMethod billingMethod or {@code null} for none */ public Subscription setBillingMethod(java.lang.String billingMethod) { this.billingMethod = billingMethod; return this; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getCreationTime() { return creationTime; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. 
See an example Epoch converter. * @param creationTime creationTime or {@code null} for none */ public Subscription setCreationTime(java.lang.Long creationTime) { this.creationTime = creationTime; return this; } /** * Primary domain name of the customer * @return value or {@code null} for none */ public java.lang.String getCustomerDomain() { return customerDomain; } /** * Primary domain name of the customer * @param customerDomain customerDomain or {@code null} for none */ public Subscription setCustomerDomain(java.lang.String customerDomain) { this.customerDomain = customerDomain; return this; } /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * @return value or {@code null} for none */ public java.lang.String getCustomerId() { return customerId; } /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * @param customerId customerId or {@code null} for none */ public Subscription setCustomerId(java.lang.String customerId) { this.customerId = customerId; return this; } /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. * @return value or {@code null} for none */ public java.lang.String getDealCode() { return dealCode; } /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. 
* @param dealCode dealCode or {@code null} for none */ public Subscription setDealCode(java.lang.String dealCode) { this.dealCode = dealCode; return this; } /** * Identifies the resource as a Subscription. Value: reseller#subscription * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * Identifies the resource as a Subscription. Value: reseller#subscription * @param kind kind or {@code null} for none */ public Subscription setKind(java.lang.String kind) { this.kind = kind; return this; } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * @return value or {@code null} for none */ public Plan getPlan() { return plan; } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * @param plan plan or {@code null} for none */ public Subscription setPlan(Plan plan) { this.plan = plan; return this; } /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. * @return value or {@code null} for none */ public java.lang.String getPurchaseOrderId() { return purchaseOrderId; } /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. 
* @param purchaseOrderId purchaseOrderId or {@code null} for none */ public Subscription setPurchaseOrderId(java.lang.String purchaseOrderId) { this.purchaseOrderId = purchaseOrderId; return this; } /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * @return value or {@code null} for none */ public RenewalSettings getRenewalSettings() { return renewalSettings; } /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * @param renewalSettings renewalSettings or {@code null} for none */ public Subscription setRenewalSettings(RenewalSettings renewalSettings) { this.renewalSettings = renewalSettings; return this; } /** * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * @return value or {@code null} for none */ public java.lang.String getResourceUiUrl() { return resourceUiUrl; } /** * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * @param resourceUiUrl resourceUiUrl or {@code null} for none */ public Subscription setResourceUiUrl(java.lang.String resourceUiUrl) { this.resourceUiUrl = resourceUiUrl; return this; } /** * This is a required property. The number and limit of user seat licenses in the plan. * @return value or {@code null} for none */ public Seats getSeats() { return seats; } /** * This is a required property. The number and limit of user seat licenses in the plan. 
* @param seats seats or {@code null} for none */ public Subscription setSeats(Seats seats) { this.seats = seats; return this; } /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * @return value or {@code null} for none */ public java.lang.String getSkuId() { return skuId; } /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * @param skuId skuId or {@code null} for none */ public Subscription setSkuId(java.lang.String skuId) { this.skuId = skuId; return this; } /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * @return value or {@code null} for none */ public java.lang.String getSkuName() { return skuName; } /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * @param skuName skuName or {@code null} for none */ public Subscription setSkuName(java.lang.String skuName) { this.skuName = skuName; return this; } /** * This is an optional property. * @return value or {@code null} for none */ public java.lang.String getStatus() { return status; } /** * This is an optional property. * @param status status or {@code null} for none */ public Subscription setStatus(java.lang.String status) { this.status = status; return this; } /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. 
Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * @return value or {@code null} for none */ public java.lang.String getSubscriptionId() { return subscriptionId; } /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * @param subscriptionId subscriptionId or {@code null} for none */ public Subscription setSubscriptionId(java.lang.String subscriptionId) { this.subscriptionId = subscriptionId; return this; } /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed. * * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * @return value or {@code null} for none */ public java.util.List<java.lang.String> getSuspensionReasons() { return suspensionReasons; } /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. 
A subscription's STATUS is SUSPENDED until all pending suspensions are removed. * * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * @param suspensionReasons suspensionReasons or {@code null} for none */ public Subscription setSuspensionReasons(java.util.List<java.lang.String> suspensionReasons) { this.suspensionReasons = suspensionReasons; return this; } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * @return value or {@code null} for none */ public TransferInfo getTransferInfo() { return transferInfo; } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * @param transferInfo transferInfo or {@code null} for none */ public Subscription setTransferInfo(TransferInfo transferInfo) { this.transferInfo = transferInfo; return this; } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. * @return value or {@code null} for none */ public TrialSettings getTrialSettings() { return trialSettings; } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. 
* @param trialSettings trialSettings or {@code null} for none */ public Subscription setTrialSettings(TrialSettings trialSettings) { this.trialSettings = trialSettings; return this; } @Override public Subscription set(String fieldName, Object value) { return (Subscription) super.set(fieldName, value); } @Override public Subscription clone() { return (Subscription) super.clone(); } /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. */ public static final class Plan extends com.google.api.client.json.GenericJson { /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * The value may be {@code null}. */ @com.google.api.client.util.Key private CommitmentInterval commitmentInterval; /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean isCommitmentPlan; /** * The planName property is required. This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. 
Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String planName; /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * @return value or {@code null} for none */ public CommitmentInterval getCommitmentInterval() { return commitmentInterval; } /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. * @param commitmentInterval commitmentInterval or {@code null} for none */ public Plan setCommitmentInterval(CommitmentInterval commitmentInterval) { this.commitmentInterval = commitmentInterval; return this; } /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * @return value or {@code null} for none */ public java.lang.Boolean getIsCommitmentPlan() { return isCommitmentPlan; } /** * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan: * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an * annual commitment plan. * @param isCommitmentPlan isCommitmentPlan or {@code null} for none */ public Plan setIsCommitmentPlan(java.lang.Boolean isCommitmentPlan) { this.isCommitmentPlan = isCommitmentPlan; return this; } /** * The planName property is required. 
This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. * @return value or {@code null} for none */ public java.lang.String getPlanName() { return planName; } /** * The planName property is required. This is the name of the subscription's plan. For more * information about the Google payment plans, see the API concepts. * * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but * will not activate the plan. A trial will automatically begin its assigned payment plan after * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is * exclusive to the Cloud Identity SKU and does not incur any billing. 
* @param planName planName or {@code null} for none */ public Plan setPlanName(java.lang.String planName) { this.planName = planName; return this; } @Override public Plan set(String fieldName, Object value) { return (Plan) super.set(fieldName, value); } @Override public Plan clone() { return (Plan) super.clone(); } /** * In this version of the API, annual commitment plan's interval is one year. Note: When * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is * omitted in all API responses. */ public static final class CommitmentInterval extends com.google.api.client.json.GenericJson { /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long endTime; /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long startTime; /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getEndTime() { return endTime; } /** * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See * an example Epoch converter. * @param endTime endTime or {@code null} for none */ public CommitmentInterval setEndTime(java.lang.Long endTime) { this.endTime = endTime; return this; } /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. 
* @return value or {@code null} for none */ public java.lang.Long getStartTime() { return startTime; } /** * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See * an example Epoch converter. * @param startTime startTime or {@code null} for none */ public CommitmentInterval setStartTime(java.lang.Long startTime) { this.startTime = startTime; return this; } @Override public CommitmentInterval set(String fieldName, Object value) { return (CommitmentInterval) super.set(fieldName, value); } @Override public CommitmentInterval clone() { return (CommitmentInterval) super.clone(); } } } /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. */ public static final class TransferInfo extends com.google.api.client.json.GenericJson { /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer minimumTransferableSeats; /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long transferabilityExpirationTime; /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. 
* @return value or {@code null} for none */ public java.lang.Integer getMinimumTransferableSeats() { return minimumTransferableSeats; } /** * When inserting a subscription, this is the minimum number of seats listed in the transfer order * for this product. For example, if the customer has 20 users, the reseller cannot place a * transfer order of 15 seats. The minimum is 20 seats. * @param minimumTransferableSeats minimumTransferableSeats or {@code null} for none */ public TransferInfo setMinimumTransferableSeats(java.lang.Integer minimumTransferableSeats) { this.minimumTransferableSeats = minimumTransferableSeats; return this; } /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * @return value or {@code null} for none */ public java.lang.Long getTransferabilityExpirationTime() { return transferabilityExpirationTime; } /** * The time when transfer token or intent to transfer will expire. The time is in milliseconds * using UNIX Epoch format. * @param transferabilityExpirationTime transferabilityExpirationTime or {@code null} for none */ public TransferInfo setTransferabilityExpirationTime(java.lang.Long transferabilityExpirationTime) { this.transferabilityExpirationTime = transferabilityExpirationTime; return this; } @Override public TransferInfo set(String fieldName, Object value) { return (TransferInfo) super.set(fieldName, value); } @Override public TransferInfo clone() { return (TransferInfo) super.clone(); } } /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For more * information, see the API concepts. */ public static final class TrialSettings extends com.google.api.client.json.GenericJson { /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.Boolean isInTrial; /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long trialEndTime; /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * @return value or {@code null} for none */ public java.lang.Boolean getIsInTrial() { return isInTrial; } /** * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in * trial. - false — The plan is not in trial. * @param isInTrial isInTrial or {@code null} for none */ public TrialSettings setIsInTrial(java.lang.Boolean isInTrial) { this.isInTrial = isInTrial; return this; } /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getTrialEndTime() { return trialEndTime; } /** * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an * example Epoch converter. * @param trialEndTime trialEndTime or {@code null} for none */ public TrialSettings setTrialEndTime(java.lang.Long trialEndTime) { this.trialEndTime = trialEndTime; return this; } @Override public TrialSettings set(String fieldName, Object value) { return (TrialSettings) super.set(fieldName, value); } @Override public TrialSettings clone() { return (TrialSettings) super.clone(); } } }
// ==== NOTE(review): file-concatenation artifact — the metadata below was fused into the
// ==== source between two copies of Subscription.java; preserved here as a comment.
// repo: googleapis/google-api-java-client-services
// size: 35,168
// path: clients/google-api-services-reseller/v1/1.29.2/com/google/api/services/reseller/model/Subscription.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.reseller.model; /** * JSON template for a subscription. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Enterprise Apps Reseller API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Subscription extends com.google.api.client.json.GenericJson { /** * Read-only field that returns the current billing method for a subscription. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String billingMethod; /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long creationTime; /** * Primary domain name of the customer * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String customerDomain; /** * This property will always be returned in a response as the unique identifier generated by * Google. In a request, this property can be either the primary domain or the unique identifier * generated by Google. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String customerId; /** * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must * be included in insert requests in order to receive discounted rate. This property is optional, * regular pricing applies if left empty. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dealCode; /** * Identifies the resource as a Subscription. Value: reseller#subscription * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * The plan property is required. In this version of the API, the G Suite plans are the flexible * plan, annual commitment plan, and the 30-day free trial plan. For more information about the * API"s payment plans, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private Plan plan; /** * This is an optional property. This purchase order (PO) information is for resellers to use for * their company tracking usage. If a purchaseOrderId value is given it appears in the API * responses and shows up in the invoice. The property accepts up to 80 plain text characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String purchaseOrderId; /** * Renewal settings for the annual commitment plan. For more detailed information, see renewal * options in the administrator help center. * The value may be {@code null}. */ @com.google.api.client.util.Key private RenewalSettings renewalSettings; /** * URL to customer's Subscriptions page in the Admin console. 
The read-only URL is generated by * the API service. This is used if your client application requires the customer to complete a * task using the Subscriptions page in the Admin console. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String resourceUiUrl; /** * This is a required property. The number and limit of user seat licenses in the plan. * The value may be {@code null}. */ @com.google.api.client.util.Key private Seats seats; /** * A required property. The skuId is a unique system identifier for a product's SKU assigned to a * customer in the subscription. For products and SKUs available in this version of the API, see * Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuId; /** * Read-only external display name for a product's SKU assigned to a customer in the subscription. * SKU names are subject to change at Google's discretion. For products and SKUs available in this * version of the API, see Product and SKU IDs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String skuName; /** * This is an optional property. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String status; /** * The subscriptionId is the subscription identifier and is unique for each customer. This is a * required property. Since a subscriptionId changes when a subscription is updated, we recommend * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve * all reseller subscriptions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String subscriptionId; /** * Read-only field containing an enumerable of all the current suspension reasons for a * subscription. It is possible for a subscription to have many concurrent, overlapping suspension * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed. 
* * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's * commitment ended and their service was cancelled at the end of their term. - * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an * internal Google reason (e.g. abuse or otherwise). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> suspensionReasons; /** * Read-only transfer related information for the subscription. For more information, see retrieve * transferable subscriptions for a customer. * The value may be {@code null}. */ @com.google.api.client.util.Key private TransferInfo transferInfo; /** * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For * more information, see the API concepts. * The value may be {@code null}. */ @com.google.api.client.util.Key private TrialSettings trialSettings; /** * Read-only field that returns the current billing method for a subscription. * @return value or {@code null} for none */ public java.lang.String getBillingMethod() { return billingMethod; } /** * Read-only field that returns the current billing method for a subscription. * @param billingMethod billingMethod or {@code null} for none */ public Subscription setBillingMethod(java.lang.String billingMethod) { this.billingMethod = billingMethod; return this; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. See an example Epoch converter. * @return value or {@code null} for none */ public java.lang.Long getCreationTime() { return creationTime; } /** * The creationTime property is the date when subscription was created. It is in milliseconds * using the Epoch format. 
See an example Epoch converter.
   *
   * @param creationTime creationTime or {@code null} for none
   */
  public Subscription setCreationTime(java.lang.Long creationTime) {
    this.creationTime = creationTime;
    return this;
  }

  /**
   * Primary domain name of the customer
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getCustomerDomain() {
    return customerDomain;
  }

  /**
   * Primary domain name of the customer
   *
   * @param customerDomain customerDomain or {@code null} for none
   */
  public Subscription setCustomerDomain(java.lang.String customerDomain) {
    this.customerDomain = customerDomain;
    return this;
  }

  /**
   * This property will always be returned in a response as the unique identifier generated by
   * Google. In a request, this property can be either the primary domain or the unique identifier
   * generated by Google.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getCustomerId() {
    return customerId;
  }

  /**
   * This property will always be returned in a response as the unique identifier generated by
   * Google. In a request, this property can be either the primary domain or the unique identifier
   * generated by Google.
   *
   * @param customerId customerId or {@code null} for none
   */
  public Subscription setCustomerId(java.lang.String customerId) {
    this.customerId = customerId;
    return this;
  }

  /**
   * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must
   * be included in insert requests in order to receive discounted rate. This property is optional,
   * regular pricing applies if left empty.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getDealCode() {
    return dealCode;
  }

  /**
   * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must
   * be included in insert requests in order to receive discounted rate. This property is optional,
   * regular pricing applies if left empty.
   *
   * @param dealCode dealCode or {@code null} for none
   */
  public Subscription setDealCode(java.lang.String dealCode) {
    this.dealCode = dealCode;
    return this;
  }

  /**
   * Identifies the resource as a Subscription. Value: reseller#subscription
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * Identifies the resource as a Subscription. Value: reseller#subscription
   *
   * @param kind kind or {@code null} for none
   */
  public Subscription setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * The plan property is required. In this version of the API, the G Suite plans are the flexible
   * plan, annual commitment plan, and the 30-day free trial plan. For more information about the
   * API's payment plans, see the API concepts.
   *
   * @return value or {@code null} for none
   */
  public Plan getPlan() {
    return plan;
  }

  /**
   * The plan property is required. In this version of the API, the G Suite plans are the flexible
   * plan, annual commitment plan, and the 30-day free trial plan. For more information about the
   * API's payment plans, see the API concepts.
   *
   * @param plan plan or {@code null} for none
   */
  public Subscription setPlan(Plan plan) {
    this.plan = plan;
    return this;
  }

  /**
   * This is an optional property. This purchase order (PO) information is for resellers to use for
   * their company tracking usage. If a purchaseOrderId value is given it appears in the API
   * responses and shows up in the invoice. The property accepts up to 80 plain text characters.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getPurchaseOrderId() {
    return purchaseOrderId;
  }

  /**
   * This is an optional property. This purchase order (PO) information is for resellers to use for
   * their company tracking usage. If a purchaseOrderId value is given it appears in the API
   * responses and shows up in the invoice. The property accepts up to 80 plain text characters.
   *
   * @param purchaseOrderId purchaseOrderId or {@code null} for none
   */
  public Subscription setPurchaseOrderId(java.lang.String purchaseOrderId) {
    this.purchaseOrderId = purchaseOrderId;
    return this;
  }

  /**
   * Renewal settings for the annual commitment plan. For more detailed information, see renewal
   * options in the administrator help center.
   *
   * @return value or {@code null} for none
   */
  public RenewalSettings getRenewalSettings() {
    return renewalSettings;
  }

  /**
   * Renewal settings for the annual commitment plan. For more detailed information, see renewal
   * options in the administrator help center.
   *
   * @param renewalSettings renewalSettings or {@code null} for none
   */
  public Subscription setRenewalSettings(RenewalSettings renewalSettings) {
    this.renewalSettings = renewalSettings;
    return this;
  }

  /**
   * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by
   * the API service. This is used if your client application requires the customer to complete a
   * task using the Subscriptions page in the Admin console.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getResourceUiUrl() {
    return resourceUiUrl;
  }

  /**
   * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by
   * the API service. This is used if your client application requires the customer to complete a
   * task using the Subscriptions page in the Admin console.
   *
   * @param resourceUiUrl resourceUiUrl or {@code null} for none
   */
  public Subscription setResourceUiUrl(java.lang.String resourceUiUrl) {
    this.resourceUiUrl = resourceUiUrl;
    return this;
  }

  /**
   * This is a required property. The number and limit of user seat licenses in the plan.
   *
   * @return value or {@code null} for none
   */
  public Seats getSeats() {
    return seats;
  }

  /**
   * This is a required property. The number and limit of user seat licenses in the plan.
   *
   * @param seats seats or {@code null} for none
   */
  public Subscription setSeats(Seats seats) {
    this.seats = seats;
    return this;
  }

  /**
   * A required property. The skuId is a unique system identifier for a product's SKU assigned to a
   * customer in the subscription. For products and SKUs available in this version of the API, see
   * Product and SKU IDs.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getSkuId() {
    return skuId;
  }

  /**
   * A required property. The skuId is a unique system identifier for a product's SKU assigned to a
   * customer in the subscription. For products and SKUs available in this version of the API, see
   * Product and SKU IDs.
   *
   * @param skuId skuId or {@code null} for none
   */
  public Subscription setSkuId(java.lang.String skuId) {
    this.skuId = skuId;
    return this;
  }

  /**
   * Read-only external display name for a product's SKU assigned to a customer in the subscription.
   * SKU names are subject to change at Google's discretion. For products and SKUs available in this
   * version of the API, see Product and SKU IDs.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getSkuName() {
    return skuName;
  }

  /**
   * Read-only external display name for a product's SKU assigned to a customer in the subscription.
   * SKU names are subject to change at Google's discretion. For products and SKUs available in this
   * version of the API, see Product and SKU IDs.
   *
   * @param skuName skuName or {@code null} for none
   */
  public Subscription setSkuName(java.lang.String skuName) {
    this.skuName = skuName;
    return this;
  }

  /**
   * This is an optional property.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getStatus() {
    return status;
  }

  /**
   * This is an optional property.
   *
   * @param status status or {@code null} for none
   */
  public Subscription setStatus(java.lang.String status) {
    this.status = status;
    return this;
  }

  /**
   * The subscriptionId is the subscription identifier and is unique for each customer. This is a
   * required property. Since a subscriptionId changes when a subscription is updated, we recommend
   * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve
   * all reseller subscriptions.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getSubscriptionId() {
    return subscriptionId;
  }

  /**
   * The subscriptionId is the subscription identifier and is unique for each customer. This is a
   * required property. Since a subscriptionId changes when a subscription is updated, we recommend
   * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve
   * all reseller subscriptions.
   *
   * @param subscriptionId subscriptionId or {@code null} for none
   */
  public Subscription setSubscriptionId(java.lang.String subscriptionId) {
    this.subscriptionId = subscriptionId;
    return this;
  }

  /**
   * Read-only field containing an enumerable of all the current suspension reasons for a
   * subscription. It is possible for a subscription to have many concurrent, overlapping suspension
   * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed.
   *
   * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and
   * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's
   * commitment ended and their service was cancelled at the end of their term. -
   * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The
   * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an
   * internal Google reason (e.g. abuse or otherwise).
   *
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getSuspensionReasons() {
    return suspensionReasons;
  }

  /**
   * Read-only field containing an enumerable of all the current suspension reasons for a
   * subscription. It is possible for a subscription to have many concurrent, overlapping suspension
   * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed.
   *
   * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and
   * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's
   * commitment ended and their service was cancelled at the end of their term. -
   * RESELLER_INITIATED - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The
   * customer's trial expired without a plan selected. - OTHER - The customer is suspended for an
   * internal Google reason (e.g. abuse or otherwise).
   *
   * @param suspensionReasons suspensionReasons or {@code null} for none
   */
  public Subscription setSuspensionReasons(java.util.List<java.lang.String> suspensionReasons) {
    this.suspensionReasons = suspensionReasons;
    return this;
  }

  /**
   * Read-only transfer related information for the subscription. For more information, see retrieve
   * transferable subscriptions for a customer.
   *
   * @return value or {@code null} for none
   */
  public TransferInfo getTransferInfo() {
    return transferInfo;
  }

  /**
   * Read-only transfer related information for the subscription. For more information, see retrieve
   * transferable subscriptions for a customer.
   *
   * @param transferInfo transferInfo or {@code null} for none
   */
  public Subscription setTransferInfo(TransferInfo transferInfo) {
    this.transferInfo = transferInfo;
    return this;
  }

  /**
   * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For
   * more information, see the API concepts.
   *
   * @return value or {@code null} for none
   */
  public TrialSettings getTrialSettings() {
    return trialSettings;
  }

  /**
   * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For
   * more information, see the API concepts.
* @param trialSettings trialSettings or {@code null} for none
   */
  public Subscription setTrialSettings(TrialSettings trialSettings) {
    this.trialSettings = trialSettings;
    return this;
  }

  @Override
  public Subscription set(String fieldName, Object value) {
    return (Subscription) super.set(fieldName, value);
  }

  @Override
  public Subscription clone() {
    return (Subscription) super.clone();
  }

  /**
   * The plan property is required. In this version of the API, the G Suite plans are the flexible
   * plan, annual commitment plan, and the 30-day free trial plan. For more information about the
   * API's payment plans, see the API concepts.
   */
  public static final class Plan extends com.google.api.client.json.GenericJson {

    /**
     * In this version of the API, annual commitment plan's interval is one year. Note: When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private CommitmentInterval commitmentInterval;

    /**
     * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan:
     * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an
     * annual commitment plan.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean isCommitmentPlan;

    /**
     * The planName property is required. This is the name of the subscription's plan. For more
     * information about the Google payment plans, see the API concepts.
     *
     * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments.
     * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY —
     * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL —
     * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day
     * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but
     * will not activate the plan. A trial will automatically begin its assigned payment plan after
     * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is
     * exclusive to the Cloud Identity SKU and does not incur any billing.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String planName;

    /**
     * In this version of the API, annual commitment plan's interval is one year. Note: When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     *
     * @return value or {@code null} for none
     */
    public CommitmentInterval getCommitmentInterval() {
      return commitmentInterval;
    }

    /**
     * In this version of the API, annual commitment plan's interval is one year. Note: When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     *
     * @param commitmentInterval commitmentInterval or {@code null} for none
     */
    public Plan setCommitmentInterval(CommitmentInterval commitmentInterval) {
      this.commitmentInterval = commitmentInterval;
      return this;
    }

    /**
     * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan:
     * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an
     * annual commitment plan.
     *
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getIsCommitmentPlan() {
      return isCommitmentPlan;
    }

    /**
     * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan:
     * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an
     * annual commitment plan.
     *
     * @param isCommitmentPlan isCommitmentPlan or {@code null} for none
     */
    public Plan setIsCommitmentPlan(java.lang.Boolean isCommitmentPlan) {
      this.isCommitmentPlan = isCommitmentPlan;
      return this;
    }

    /**
     * The planName property is required. This is the name of the subscription's plan. For more
     * information about the Google payment plans, see the API concepts.
     *
     * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments.
     * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY —
     * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL —
     * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day
     * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but
     * will not activate the plan. A trial will automatically begin its assigned payment plan after
     * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is
     * exclusive to the Cloud Identity SKU and does not incur any billing.
     *
     * @return value or {@code null} for none
     */
    public java.lang.String getPlanName() {
      return planName;
    }

    /**
     * The planName property is required. This is the name of the subscription's plan. For more
     * information about the Google payment plans, see the API concepts.
     *
     * Possible values are: - ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments.
     * Caution: ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY —
     * The annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL —
     * The 30-day free trial plan. A subscription in trial will be suspended after the 30th free day
     * if no payment plan is assigned. Calling changePlan will assign a payment plan to a trial but
     * will not activate the plan. A trial will automatically begin its assigned payment plan after
     * its 30th free day or immediately after calling startPaidService. - FREE — The free plan is
     * exclusive to the Cloud Identity SKU and does not incur any billing.
     *
     * @param planName planName or {@code null} for none
     */
    public Plan setPlanName(java.lang.String planName) {
      this.planName = planName;
      return this;
    }

    @Override
    public Plan set(String fieldName, Object value) {
      return (Plan) super.set(fieldName, value);
    }

    @Override
    public Plan clone() {
      return (Plan) super.clone();
    }

    /**
     * In this version of the API, annual commitment plan's interval is one year. Note: When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     */
    public static final class CommitmentInterval extends com.google.api.client.json.GenericJson {

      /**
       * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format.
       * See an example Epoch converter.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      @com.google.api.client.json.JsonString
      private java.lang.Long endTime;

      /**
       * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format.
       * See an example Epoch converter.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      @com.google.api.client.json.JsonString
      private java.lang.Long startTime;

      /**
       * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format.
       * See an example Epoch converter.
       *
       * @return value or {@code null} for none
       */
      public java.lang.Long getEndTime() {
        return endTime;
      }

      /**
       * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format.
       * See an example Epoch converter.
       *
       * @param endTime endTime or {@code null} for none
       */
      public CommitmentInterval setEndTime(java.lang.Long endTime) {
        this.endTime = endTime;
        return this;
      }

      /**
       * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format.
       * See an example Epoch converter.
       *
       * @return value or {@code null} for none
       */
      public java.lang.Long getStartTime() {
        return startTime;
      }

      /**
       * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format.
       * See an example Epoch converter.
       *
       * @param startTime startTime or {@code null} for none
       */
      public CommitmentInterval setStartTime(java.lang.Long startTime) {
        this.startTime = startTime;
        return this;
      }

      @Override
      public CommitmentInterval set(String fieldName, Object value) {
        return (CommitmentInterval) super.set(fieldName, value);
      }

      @Override
      public CommitmentInterval clone() {
        return (CommitmentInterval) super.clone();
      }
    }
  }

  /**
   * Read-only transfer related information for the subscription. For more information, see retrieve
   * transferable subscriptions for a customer.
   */
  public static final class TransferInfo extends com.google.api.client.json.GenericJson {

    /**
     * When inserting a subscription, this is the minimum number of seats listed in the transfer order
     * for this product. For example, if the customer has 20 users, the reseller cannot place a
     * transfer order of 15 seats. The minimum is 20 seats.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Integer minimumTransferableSeats;

    /**
     * The time when transfer token or intent to transfer will expire. The time is in milliseconds
     * using UNIX Epoch format.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    @com.google.api.client.json.JsonString
    private java.lang.Long transferabilityExpirationTime;

    /**
     * When inserting a subscription, this is the minimum number of seats listed in the transfer order
     * for this product. For example, if the customer has 20 users, the reseller cannot place a
     * transfer order of 15 seats. The minimum is 20 seats.
     *
     * @return value or {@code null} for none
     */
    public java.lang.Integer getMinimumTransferableSeats() {
      return minimumTransferableSeats;
    }

    /**
     * When inserting a subscription, this is the minimum number of seats listed in the transfer order
     * for this product. For example, if the customer has 20 users, the reseller cannot place a
     * transfer order of 15 seats. The minimum is 20 seats.
     *
     * @param minimumTransferableSeats minimumTransferableSeats or {@code null} for none
     */
    public TransferInfo setMinimumTransferableSeats(java.lang.Integer minimumTransferableSeats) {
      this.minimumTransferableSeats = minimumTransferableSeats;
      return this;
    }

    /**
     * The time when transfer token or intent to transfer will expire. The time is in milliseconds
     * using UNIX Epoch format.
     *
     * @return value or {@code null} for none
     */
    public java.lang.Long getTransferabilityExpirationTime() {
      return transferabilityExpirationTime;
    }

    /**
     * The time when transfer token or intent to transfer will expire. The time is in milliseconds
     * using UNIX Epoch format.
     *
     * @param transferabilityExpirationTime transferabilityExpirationTime or {@code null} for none
     */
    public TransferInfo setTransferabilityExpirationTime(java.lang.Long transferabilityExpirationTime) {
      this.transferabilityExpirationTime = transferabilityExpirationTime;
      return this;
    }

    @Override
    public TransferInfo set(String fieldName, Object value) {
      return (TransferInfo) super.set(fieldName, value);
    }

    @Override
    public TransferInfo clone() {
      return (TransferInfo) super.clone();
    }
  }

  /**
   * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For more
   * information, see the API concepts.
   */
  public static final class TrialSettings extends com.google.api.client.json.GenericJson {

    /**
     * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in
     * trial. - false — The plan is not in trial.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean isInTrial;

    /**
     * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an
     * example Epoch converter.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    @com.google.api.client.json.JsonString
    private java.lang.Long trialEndTime;

    /**
     * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in
     * trial. - false — The plan is not in trial.
     *
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getIsInTrial() {
      return isInTrial;
    }

    /**
     * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in
     * trial. - false — The plan is not in trial.
     *
     * @param isInTrial isInTrial or {@code null} for none
     */
    public TrialSettings setIsInTrial(java.lang.Boolean isInTrial) {
      this.isInTrial = isInTrial;
      return this;
    }

    /**
     * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an
     * example Epoch converter.
     *
     * @return value or {@code null} for none
     */
    public java.lang.Long getTrialEndTime() {
      return trialEndTime;
    }

    /**
     * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an
     * example Epoch converter.
     *
     * @param trialEndTime trialEndTime or {@code null} for none
     */
    public TrialSettings setTrialEndTime(java.lang.Long trialEndTime) {
      this.trialEndTime = trialEndTime;
      return this;
    }

    @Override
    public TrialSettings set(String fieldName, Object value) {
      return (TrialSettings) super.set(fieldName, value);
    }

    @Override
    public TrialSettings clone() {
      return (TrialSettings) super.clone();
    }
  }
}
apache/hop
35,226
plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/csvinput/CsvInput.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hop.pipeline.transforms.csvinput;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.ByteOrderMark;
import org.apache.commons.io.input.BOMInputStream;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.provider.local.LocalFile;
import org.apache.hop.core.ResultFile;
import org.apache.hop.core.exception.HopConversionException;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.exception.HopFileException;
import org.apache.hop.core.exception.HopValueException;
import org.apache.hop.core.file.EncodingType;
import org.apache.hop.core.file.TextFileInputField;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.row.RowDataUtil;
import org.apache.hop.core.row.RowMeta;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.vfs.HopVfs;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.Pipeline;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.BaseTransform;
import org.apache.hop.pipeline.transform.TransformMeta;
import org.apache.hop.pipeline.transforms.fileinput.TextFileInput;
import org.apache.hop.pipeline.transforms.fileinput.TextFileInputMeta;
import org.apache.hop.pipeline.transforms.fileinput.text.BOMDetector;
import org.apache.hop.ui.pipeline.transform.common.TextFileLineUtil;

/** Reads a simple CSV file and just outputs the Strings found in the file... */
public class CsvInput extends BaseTransform<CsvInputMeta, CsvInputData> {
  private static final Class<?> PKG = CsvInput.class;

  /** Standard Hop transform constructor; all state is delegated to {@link BaseTransform}. */
  public CsvInput(
      TransformMeta transformMeta,
      CsvInputMeta meta,
      CsvInputData data,
      int copyNr,
      PipelineMeta pipelineMeta,
      Pipeline pipeline) {
    super(transformMeta, meta, data, copyNr, pipelineMeta, pipeline);
  }

  /**
   * Reads and emits one CSV row per invocation.
   *
   * <p>On the first call this initializes the output and conversion row metadata, resolves the
   * list of files to read (from the previous transform(s) when none are preset), prepares
   * parallel reading when enabled, and opens the first file. Conversion errors are either routed
   * to the error-handling rowset (when configured) or rethrown.
   *
   * @return true while more rows may follow, false once all input has been consumed
   * @throws HopException on read errors or unrouted conversion errors
   */
  @Override
  public boolean processRow() throws HopException {

    if (first) {
      first = false;

      data.outputRowMeta = new RowMeta();
      meta.getFields(data.outputRowMeta, getTransformName(), null, null, this, metadataProvider);

      if (data.filenames == null) {
        // We're expecting the list of filenames from the previous transform(s)...
        //
        getFilenamesFromPreviousTransforms();
      }

      // We only run in parallel if we have at least one file to process
      // AND if we have more than one transform copy running...
      //
      data.parallel = meta.isRunningInParallel() && data.totalNumberOfTransforms > 1;

      // The conversion logic for when the lazy conversion is turned of is simple:
      // Pretend it's a lazy conversion object anyway and get the native type during conversion.
      //
      data.convertRowMeta = data.outputRowMeta.clone();
      for (IValueMeta valueMeta : data.convertRowMeta.getValueMetaList()) {
        valueMeta.setStorageType(IValueMeta.STORAGE_TYPE_BINARY_STRING);
      }

      // Calculate the indexes for the filename and row number fields.
      // When both extra fields are present, the row number comes right after the filename.
      //
      data.filenameFieldIndex = -1;
      if (!Utils.isEmpty(meta.getFilenameField()) && meta.isIncludingFilename()) {
        data.filenameFieldIndex = meta.getInputFields().length;
      }

      data.rownumFieldIndex = -1;
      if (!Utils.isEmpty(meta.getRowNumField())) {
        data.rownumFieldIndex = meta.getInputFields().length;
        if (data.filenameFieldIndex >= 0) {
          data.rownumFieldIndex++;
        }
      }

      // Now handle the parallel reading aspect: determine total of all the file sizes
      // Then skip to the appropriate file and location in the file to start reading...
      // Also skip to right after the first newline
      //
      if (data.parallel) {
        prepareToRunInParallel();
      }

      // Open the next file...
      //
      if (!openNextFile()) {
        setOutputDone();
        return false; // nothing to see here, move along...
      }
    }

    // If we are running in parallel, make sure we don't read too much in this transform copy...
    //
    if (data.parallel && data.totalBytesRead >= data.blockToRead) {
      setOutputDone(); // stop reading
      return false;
    }

    try {
      // NOTE(review): the meaning of the two boolean flags is defined by readOneRow
      // (not visible in this chunk) — confirm before relying on them.
      Object[] outputRowData = readOneRow(false, false); // get row, set busy!
      // no more input to be expected...
      if (outputRowData == null) {
        if (openNextFile()) {
          return true; // try again on the next loop...
        } else {
          setOutputDone(); // last file, end here
          return false;
        }
      } else {
        putRow(data.outputRowMeta, outputRowData); // copy row to possible alternate rowset(s).
        if (checkFeedback(getLinesInput()) && isBasic()) {
          logBasic(
              BaseMessages.getString(
                  PKG, "CsvInput.Log.LineNumber", Long.toString(getLinesInput())));
        }
      }
    } catch (HopConversionException e) {
      if (getTransformMeta().isDoingErrorHandling()) {
        // Collect all conversion causes and the offending field metadata into
        // comma-separated strings for the error row.
        StringBuilder errorDescriptions = new StringBuilder(100);
        StringBuilder errorFields = new StringBuilder(50);
        for (int i = 0; i < e.getCauses().size(); i++) {
          if (i > 0) {
            errorDescriptions.append(", ");
            errorFields.append(", ");
          }
          errorDescriptions.append(e.getCauses().get(i).getMessage());
          errorFields.append(e.getFields().get(i).toStringMeta());
        }
        putError(
            data.outputRowMeta,
            e.getRowData(),
            e.getCauses().size(),
            errorDescriptions.toString(),
            errorFields.toString(),
            "CSVINPUT001");
      } else {
        // Only forward the first cause.
        //
        throw new HopException(e.getMessage(), e.getCauses().get(0));
      }
    }
    return true;
  }

  /**
   * Computes the byte range this transform copy should read when running in parallel: sums all
   * file sizes, then derives this copy's start/end position, the first file to read and the
   * number of bytes to skip inside it.
   *
   * @throws HopException if the file sizes cannot be determined
   */
  public void prepareToRunInParallel() throws HopException {
    try {
      // At this point it doesn't matter if we have 1 or more files.
      // We'll use the same algorithm...
      //
      for (String filename : data.filenames) {
        long size = HopVfs.getFileObject(filename, variables).getContent().getSize();
        data.fileSizes.add(size);
        data.totalFileSize += size;
      }

      // Now we can determine the range to read.
// // For example, the total file size is 50000, spread over 5 files of 10000 // Suppose we have 2 transform copies running (clustered or not) // That means transform 0 has to read 0-24999 and transform 1 has to read 25000-49999 // // The size of the block to read (25000 in the example) : // data.blockToRead = Math.round((double) data.totalFileSize / (double) data.totalNumberOfTransforms); // Now we calculate the position to read (0 and 25000 in our sample) : // data.startPosition = data.blockToRead * data.transformNumber; data.endPosition = data.startPosition + data.blockToRead; // Determine the start file number (0 or 2 in our sample) : // >0<,1000,>2000<,3000,4000 // long totalFileSize = 0L; for (int i = 0; i < data.fileSizes.size(); i++) { long size = data.fileSizes.get(i); // Start of file range: totalFileSize // End of file range: totalFileSize+size if (data.startPosition >= totalFileSize && data.startPosition < totalFileSize + size) { // This is the file number to start reading from... // data.filenr = i; // remember where we started to read to allow us to know that we have to skip the header // row in the next files // (if any) // data.startFilenr = i; // How many bytes do we skip in that first file? 
// if (data.startPosition == 0) { data.bytesToSkipInFirstFile = 0L; } else { data.bytesToSkipInFirstFile = data.startPosition - totalFileSize; } break; } totalFileSize += size; } if (data.filenames.length > 0) { logBasic( BaseMessages.getString( PKG, "CsvInput.Log.ParallelFileNrAndPositionFeedback", data.filenames[data.filenr], Long.toString(data.fileSizes.get(data.filenr)), Long.toString(data.bytesToSkipInFirstFile), Long.toString(data.blockToRead))); } } catch (Exception e) { throw new HopException( BaseMessages.getString(PKG, "CsvInput.Exception.ErrorPreparingParallelRun"), e); } } private void getFilenamesFromPreviousTransforms() throws HopException { List<String> filenames = new ArrayList<>(); boolean firstRow = true; int index = -1; Object[] row = getRow(); while (row != null) { if (firstRow) { firstRow = false; // Get the filename field index... // String filenameField = resolve(meta.getFilenameField()); index = getInputRowMeta().indexOfValue(filenameField); if (index < 0) { throw new HopException( BaseMessages.getString( PKG, "CsvInput.Exception.FilenameFieldNotFound", filenameField)); } } String filename = getInputRowMeta().getString(row, index); filenames.add(filename); // add it to the list... row = getRow(); // Grab another row... } data.filenames = filenames.toArray(new String[filenames.size()]); logBasic( BaseMessages.getString( PKG, "CsvInput.Log.ReadingFromNrFiles", Integer.toString(data.filenames.length))); } @Override public void dispose() { try { // Close the previous file... // if (data.fc != null) { data.fc.close(); } } catch (Exception e) { logError("Error closing file channel", e); } try { if (data.fis != null) { data.fis.close(); } } catch (Exception e) { logError("Error closing file input stream", e); } super.dispose(); } private boolean openNextFile() throws HopException { try { // Close the previous file... // data.closeFile(); if (data.filenr >= data.filenames.length) { return false; } // Open the next one... 
      //
      data.fieldsMapping = createFieldMapping(data.filenames[data.filenr], meta);
      FileObject fileObject = HopVfs.getFileObject(data.filenames[data.filenr], variables);
      if (!(fileObject instanceof LocalFile)) {
        // We can only use NIO on local files at the moment, so that's what we limit ourselves to.
        //
        throw new HopException(
            BaseMessages.getString(PKG, "CsvInput.Log.OnlyLocalFilesAreSupported"));
      }

      if (meta.isLazyConversionActive()) {
        data.binaryFilename = data.filenames[data.filenr].getBytes();
      }

      String vfsFilename = HopVfs.getFilename(fileObject);

      // Skip over a byte-order mark, if present, so it doesn't end up in the first field.
      int bomSize = getBOMSize(vfsFilename);

      data.fis = new FileInputStream(vfsFilename);
      if (0 != bomSize) {
        data.fis.skip(bomSize);
      }

      data.fc = data.fis.getChannel();
      data.bb = ByteBuffer.allocateDirect(data.preferredBufferSize);

      // If we are running in parallel and we need to skip bytes in the first file, let's do so
      // here.
      //
      if (data.parallel && data.bytesToSkipInFirstFile > 0) {
        data.fc.position(data.bytesToSkipInFirstFile);

        // evaluate whether there is a need to skip a row
        if (needToSkipRow()) {
          // when reading in parallel, the previous code would introduce additional rows and / or
          // invalid data in the output.
          // in parallel mode we don't support new lines inside field data so it's safe to fast
          // forward until we find a new line.
          // when a newline is found we need to check for an additional new line character, while
          // in unix systems it's just a single '\n',
          // on windows systems, it's a sequence of '\r' and '\n'. finally we set the start of the
          // buffer to the end buffer position.
          while (!data.newLineFound()) {
            data.moveEndBufferPointer();
          }
          data.moveEndBufferPointer();
          if (data.newLineFound()) {
            data.moveEndBufferPointer();
          }
        }

        data.setStartBuffer(data.getEndBuffer());
      }

      // Add filename to result filenames ?
      if (meta.isAddResultFile()) {
        ResultFile resultFile =
            new ResultFile(
                ResultFile.FILE_TYPE_GENERAL, fileObject, getPipelineMeta().getName(), toString());
        resultFile.setComment("File was read by a Csv input transform");
        addResultFile(resultFile);
      }

      // Move to the next filename
      //
      data.filenr++;

      // See if we need to skip a row...
      // - If you have a header row checked and if you're not running in parallel
      // - If you're running in parallel, if a header row is checked, if you're at the beginning of
      // a file
      //
      if (meta.isHeaderPresent()) {
        // Standard flat file : skip header
        if (!data.parallel || data.bytesToSkipInFirstFile <= 0) {
          readOneRow(true, false); // skip this row.
          logBasic(
              BaseMessages.getString(
                  PKG, "CsvInput.Log.HeaderRowSkipped", data.filenames[data.filenr - 1]));
          if (data.fieldsMapping.size() == 0) {
            return false;
          }
        }
      }

      // Reset the row number pointer...
      //
      data.rowNumber = 1L;

      // Don't skip again in the next file...
      //
      data.bytesToSkipInFirstFile = -1L;

      return true;
    } catch (HopException e) {
      throw e;
    } catch (Exception e) {
      throw new HopException(e);
    }
  }

  /**
   * Determines the size in bytes of the byte-order mark (if any) at the start of the given file.
   *
   * @param vfsFilename the local file to inspect
   * @return the BOM size in bytes, or 0 when no BOM is present
   * @throws Exception on I/O errors
   */
  protected int getBOMSize(String vfsFilename) throws Exception {
    int bomSize = 0;
    try (FileInputStream fis = new FileInputStream(vfsFilename);
        BufferedInputStream bis = new BufferedInputStream(fis)) {
      BOMDetector bom = new BOMDetector(bis);
      if (bom.bomExist()) {
        bomSize = bom.getBomSize();
      }
    }
    return bomSize;
  }

  /**
   * Builds the mapping between the columns found in the file and the fields defined on the
   * transform: name-based when a header row is present, position-based otherwise.
   */
  IFieldsMapping createFieldMapping(String fileName, CsvInputMeta csvInputMeta)
      throws HopException {
    IFieldsMapping mapping = null;
    if (csvInputMeta.isHeaderPresent()) {
      String[] fieldNames = readFieldNamesFromFile(fileName, csvInputMeta);
      mapping = NamedFieldsMapping.mapping(fieldNames, fieldNames(csvInputMeta));
    } else {
      int fieldsCount =
          csvInputMeta.getInputFields() == null ?
              0 : csvInputMeta.getInputFields().length;
      mapping = UnnamedFieldsMapping.mapping(fieldsCount);
    }
    return mapping;
  }

  /**
   * Reads the first line of the given file and splits it into field names using the transform's
   * delimiter, enclosure, escape character and encoding settings.
   */
  String[] readFieldNamesFromFile(String fileName, CsvInputMeta csvInputMeta) throws HopException {
    String delimiter = resolve(csvInputMeta.getDelimiter());
    String enclosure = resolve(csvInputMeta.getEnclosure());
    String realEncoding = resolve(csvInputMeta.getEncoding());

    try (FileObject fileObject = HopVfs.getFileObject(fileName, variables);
        BOMInputStream inputStream =
            new BOMInputStream(
                HopVfs.getInputStream(fileObject),
                ByteOrderMark.UTF_8,
                ByteOrderMark.UTF_16LE,
                ByteOrderMark.UTF_16BE)) {
      InputStreamReader reader = null;
      if (Utils.isEmpty(realEncoding)) {
        reader = new InputStreamReader(inputStream);
      } else {
        reader = new InputStreamReader(inputStream, realEncoding);
      }
      EncodingType encodingType = EncodingType.guessEncodingType(reader.getEncoding());
      String line =
          TextFileInput.getLine(
              getLogChannel(),
              reader,
              encodingType,
              TextFileInputMeta.FILE_FORMAT_UNIX,
              new StringBuilder(1000));
      String[] fieldNames =
          TextFileLineUtil.guessStringsFromLine(
              getLogChannel(), line, delimiter, enclosure, csvInputMeta.getEscapeCharacter());
      if (!Utils.isEmpty(csvInputMeta.getEnclosure())) {
        removeEnclosure(fieldNames, csvInputMeta.getEnclosure());
      }
      trimFieldNames(fieldNames);
      return fieldNames;
    } catch (IOException e) {
      throw new HopFileException(
          BaseMessages.getString(PKG, "CsvInput.Exception.CreateFieldMappingError"), e);
    }
  }

  /** Returns the field names as configured on the transform metadata. */
  static String[] fieldNames(CsvInputMeta csvInputMeta) {
    TextFileInputField[] fields = csvInputMeta.getInputFields();
    String[] fieldNames = new String[fields.length];
    for (int i = 0; i < fields.length; i++) {
      // TODO: We need to sanitize field names because existing ktr files may contain field names
      // with leading BOM
      fieldNames[i] = fields[i].getName();
    }
    return fieldNames;
  }

  /** Trims leading and trailing whitespace from every field name, in place. */
  static void trimFieldNames(String[] strings) {
    if (strings != null) {
      for (int i = 0; i < strings.length; i++) {
        strings[i] = strings[i].trim();
      }
    }
  }

  /** Strips a leading and a trailing enclosure character from every field, in place. */
  static void removeEnclosure(String[] fields, String enclosure) {
    if (fields != null) {
      for (int i = 0; i < fields.length; i++) {
        if (fields[i].startsWith(enclosure)
            && fields[i].endsWith(enclosure)
            && fields[i].length() > 1) {
          fields[i] = fields[i].substring(1, fields[i].length() - 1);
        }
      }
    }
  }

  /**
   * We need to skip row only if a line, that we are currently on is read by the previous transform
   * <b>partly</b>. In other words, we DON'T skip a line if we are just beginning to read it from
   * the first symbol. We have to do some work for this: read last byte from the previous transform
   * and make sure that it is a new line byte. But it's not enough. There could be a situation,
   * where new line is indicated by '\r\n' construction. And if we are <b>between</b> this
   * construction, we want to skip last '\n', and don't want to include it in our line.
   *
   * <p>So, we DON'T skip line only if the previous char is new line indicator AND we are not
   * between '\r\n'.
   */
  private boolean needToSkipRow() {
    try {
      // first we move pointer to the last byte of the previous transform
      data.fc.position(data.fc.position() - 1);
      // read data, if not yet
      data.resizeBufferIfNeeded();

      // check whether the last symbol from the previous transform is a new line
      if (data.newLineFound()) {
        // don't increase bytes read for this transform, as it is actually content of another
        // transform
        // and we are reading this just for evaluation.
        data.moveEndBufferPointer(false);

        // now we are at the first char of our thread.
        // there is still a situation we want to avoid: when there is a windows style "/r/n", and we
        // are between two
        // of this chars. In this case we need to skip a line. Otherwise we don't skip it.
        return data.newLineFound();
      } else {
        // moving to the first char of our line.
data.moveEndBufferPointer(false); } } catch (IOException e) { e.printStackTrace(); } finally { try { data.fc.position(data.fc.position() + 1); } catch (IOException e) { // nothing to do here } } return true; } /** * Read a single row of data from the file... * * @param skipRow if row should be skipped: header row or part of row in case of parallel read * @param ignoreEnclosures if enclosures should be ignored, i.e. in case of we need to skip part * of the row during parallel read * @return a row of data... * @throws HopException */ private Object[] readOneRow(boolean skipRow, boolean ignoreEnclosures) throws HopException { try { Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size()); int outputIndex = 0; boolean newLineFound = false; boolean endOfBuffer = false; List<Exception> conversionExceptions = null; List<IValueMeta> exceptionFields = null; // The strategy is as follows... // We read a block of byte[] from the file. // We scan for the separators in the file (NOT for line feeds etc) // Then we scan that block of data. // We keep a byte[] that we extend if needed.. // At the end of the block we read another, etc. // // Let's start by looking where we left off reading. // while (!newLineFound && outputIndex < meta.getInputFields().length) { if (data.resizeBufferIfNeeded()) { // Last row was being discarded if the last item is null and // there is no end of line delimiter if (outputRowData != null && outputIndex > 0) { // Make certain that at least one record exists before // filling the rest of them with null // Optionally add the current filename to the mix as well... 
            //
            if (meta.isIncludingFilename() && !Utils.isEmpty(meta.getFilenameField())) {
              if (meta.isLazyConversionActive()) {
                outputRowData[data.filenameFieldIndex] = data.binaryFilename;
              } else {
                outputRowData[data.filenameFieldIndex] = data.filenames[data.filenr - 1];
              }
            }
            if (data.isAddingRowNumber) {
              outputRowData[data.rownumFieldIndex] = data.rowNumber++;
            }
            incrementLinesInput();
            return outputRowData;
          }
          return null; // nothing more to read, call it a day.
        }

        // OK, at this point we should have data in the byteBuffer and we should be able to scan for
        // the next
        // delimiter (;)
        // So let's look for a delimiter.
        // Also skip over the enclosures ("), it is NOT taking into account escaped enclosures.
        // Later we can add an option for having escaped or double enclosures in the file. <sigh>
        //
        boolean delimiterFound = false;
        boolean enclosureFound = false;
        boolean doubleLineEnd = false;
        int escapedEnclosureFound = 0;
        boolean ignoreEnclosuresInField = ignoreEnclosures;
        while (!delimiterFound && !newLineFound && !endOfBuffer) {
          // If we find the first char, we might find others as well ;-)
          // Single byte delimiters only for now.
          //
          if (data.delimiterFound()) {
            delimiterFound = true;
          } else if ((!meta.isNewlinePossibleInFields()
                  || outputIndex == data.fieldsMapping.size() - 1)
              && data.newLineFound()) {
            // Perhaps we found a (pre-mature) new line?
            //
            // In case we are not using an enclosure and in case fields contain new lines
            // we need to make sure that we check the newlines possible flag.
            // If the flag is enable we skip newline checking except for the last field in the row.
            // In that one we can't support newlines without enclosure (handled below).
            //
            newLineFound = true;

            // Skip new line character
            for (int i = 0; i < data.encodingType.getLength(); i++) {
              data.moveEndBufferPointer();
            }

            // Re-check for double new line (\r\n)...
            if (data.newLineFound()) {
              // Found another one, need to skip it later
              doubleLineEnd = true;
            }
          } else if (data.enclosureFound() && !ignoreEnclosuresInField) {
            int enclosurePosition = data.getEndBuffer();
            int fieldFirstBytePosition = data.getStartBuffer();
            if (fieldFirstBytePosition == enclosurePosition) {
              // Perhaps we need to skip over an enclosed part?
              // We always expect exactly one enclosure character
              // If we find the enclosure doubled, we consider it escaped.
              // --> "" is converted to " later on.
              //
              enclosureFound = true;
              boolean keepGoing;
              do {
                if (data.moveEndBufferPointer()) {
                  enclosureFound = false;
                  break;
                }
                keepGoing = !data.enclosureFound();
                if (!keepGoing) {
                  // We found an enclosure character.
                  // Read another byte...
                  if (!data.endOfBuffer() && data.moveEndBufferPointer()) {
                    break;
                  }
                  if (data.enclosure.length > 1) {
                    data.moveEndBufferPointer();
                  }
                  // If this character is also an enclosure, we can consider the enclosure
                  // "escaped".
                  // As such, if this is an enclosure, we keep going...
                  //
                  keepGoing = data.enclosureFound();
                  if (keepGoing) {
                    escapedEnclosureFound++;
                  }
                }
              } while (keepGoing);

              // Did we reach the end of the buffer?
              //
              if (data.endOfBuffer()) {
                endOfBuffer = true;
                break;
              }
            } else {
              // Ignoring enclosure if it's not at the field start
              ignoreEnclosuresInField = true;
            }
          } else {
            if (data.moveEndBufferPointer()) {
              endOfBuffer = true;
              break;
            }
          }
        }

        // If we're still here, we found a delimiter...
        // Since the starting point never changed really, we just can grab range:
        //
        //    [startBuffer-endBuffer[
        //
        // This is the part we want. data.byteBuffer[data.startBuffer]
        //
        byte[] field = data.getField(delimiterFound, enclosureFound, newLineFound, endOfBuffer);

        // Did we have any escaped characters in there?
        //
        if (escapedEnclosureFound > 0) {
          if (isRowLevel()) {
            logRowlevel("Escaped enclosures found in " + new String(field));
          }
          field = data.removeEscapedEnclosures(field, escapedEnclosureFound);
        }

        final int actualFieldIndex = outputIndex++;
        if (actualFieldIndex != IFieldsMapping.FIELD_DOES_NOT_EXIST) {
          if (!skipRow) {
            if (meta.isLazyConversionActive()) {
              outputRowData[actualFieldIndex] = field;
            } else {
              // We're not lazy so we convert the data right here and now.
              // The convert object uses binary storage as such we just have to ask the native type
              // from it.
              // That will do the actual conversion.
              //
              IValueMeta sourceValueMeta = data.convertRowMeta.getValueMeta(actualFieldIndex);
              try {
                outputRowData[actualFieldIndex] =
                    sourceValueMeta.convertBinaryStringToNativeType(field);
              } catch (HopValueException e) {
                // There was a conversion error,
                //
                outputRowData[actualFieldIndex] = null;
                if (conversionExceptions == null) {
                  conversionExceptions = new ArrayList<>();
                  exceptionFields = new ArrayList<>();
                }
                conversionExceptions.add(e);
                exceptionFields.add(sourceValueMeta);
              }
            }
          } else {
            outputRowData[actualFieldIndex] = null; // nothing for the header, no conversions here.
          }
        }

        // OK, move on to the next field...
        // Before we increment, we should check to see if the while condition is about to fail.
        // this will prevent the endBuffer from being incremented twice (once by this block and once
        // in the
        // do-while loop below) and possibly skipping a newline character. This can occur if there
        // is an
        // empty column at the end of the row (see the Jira case for details)
        if ((!newLineFound && outputIndex < meta.getInputFields().length)
            || (newLineFound && doubleLineEnd)) {
          int i = 0;
          while ((!data.newLineFound() && (i < data.delimiter.length))) {
            data.moveEndBufferPointer();
            i++;
          }
          if (data.isCarriageReturn() || doubleLineEnd) {
            data.moveEndBufferPointerXTimes(data.encodingType.getLength());
          }
        }

        data.setStartBuffer(data.getEndBuffer());
      }

      // See if we reached the end of the line.
// If not, we need to skip the remaining items on the line until the next newline... // if (!newLineFound && !data.resizeBufferIfNeeded()) { do { data.moveEndBufferPointer(); if (data.resizeBufferIfNeeded()) { break; // nothing more to read. } // TODO: if we're using quoting we might be dealing with a very dirty file with quoted // newlines in trailing // fields. (imagine that) // In that particular case we want to use the same logic we use above (refactored a bit) // to skip these fields. } while (!data.newLineFound()); if (!data.resizeBufferIfNeeded()) { while (data.newLineFound()) { data.moveEndBufferPointer(); if (data.resizeBufferIfNeeded()) { break; // nothing more to read. } } } // Make sure we start at the right position the next time around. data.setStartBuffer(data.getEndBuffer()); } // Optionally add the current filename to the mix as well... // if (meta.isIncludingFilename() && !Utils.isEmpty(meta.getFilenameField())) { if (meta.isLazyConversionActive()) { outputRowData[data.filenameFieldIndex] = data.binaryFilename; } else { outputRowData[data.filenameFieldIndex] = data.filenames[data.filenr - 1]; } } if (data.isAddingRowNumber) { outputRowData[data.rownumFieldIndex] = data.rowNumber++; } if (!ignoreEnclosures) { incrementLinesInput(); } if (!Utils.isEmpty(conversionExceptions)) { // Forward the first exception // throw new HopConversionException( "There were " + conversionExceptions.size() + " conversion errors on line " + getLinesInput(), conversionExceptions, exceptionFields, outputRowData); } return outputRowData; } catch (HopConversionException e) { throw e; } catch (IOException e) { throw new HopFileException("Exception reading line using NIO", e); } } @Override public boolean init() { if (super.init()) { // see if a variable is used as encoding value String realEncoding = resolve(meta.getEncoding()); data.preferredBufferSize = Integer.parseInt(resolve(meta.getBufferSize())); // If the transform doesn't have any previous transforms, we just get 
the filename. // Otherwise, we'll grab the list of file names later... // if (getPipelineMeta().findPreviousTransforms(getTransformMeta()).isEmpty()) { String filename = resolve(meta.getFilename()); if (Utils.isEmpty(filename)) { logError(BaseMessages.getString(PKG, "CsvInput.MissingFilename.Message")); return false; } data.filenames = new String[] { filename, }; } else { data.filenames = null; data.filenr = 0; } data.totalBytesRead = 0L; data.encodingType = EncodingType.guessEncodingType(realEncoding); // set the delimiter byte value to the code point of the // character as represented in the input file's encoding try { data.delimiter = data.encodingType.getBytes(resolve(meta.getDelimiter()), realEncoding); if (Utils.isEmpty(meta.getEnclosure())) { data.enclosure = null; } else { data.enclosure = data.encodingType.getBytes(resolve(meta.getEnclosure()), realEncoding); } } catch (UnsupportedEncodingException e) { logError(BaseMessages.getString(PKG, "CsvInput.BadEncoding.Message"), e); return false; } data.isAddingRowNumber = !Utils.isEmpty(meta.getRowNumField()); // Handle parallel reading capabilities... // data.stopReading = false; if (meta.isRunningInParallel()) { data.transformNumber = getCopyNr(); data.totalNumberOfTransforms = getTransformMeta().getCopies(this); // We are not handling a single file, but possibly a list of files... // As such, the fair thing to do is calculate the total size of the files // Then read the required block. // data.fileSizes = new ArrayList<>(); data.totalFileSize = 0L; } // Set the most efficient pattern matcher to match the delimiter. // if (data.delimiter.length == 1) { data.delimiterMatcher = new SingleBytePatternMatcher(); } else { data.delimiterMatcher = new MultiBytePatternMatcher(); } // Set the most efficient pattern matcher to match the enclosure. 
// if (data.enclosure == null) { data.enclosureMatcher = new EmptyPatternMatcher(); } else { if (data.enclosure.length == 1) { data.enclosureMatcher = new SingleBytePatternMatcher(); } else { data.enclosureMatcher = new MultiBytePatternMatcher(); } } switch (data.encodingType) { case DOUBLE_BIG_ENDIAN: data.crLfMatcher = new MultiByteBigCrLfMatcher(); break; case DOUBLE_LITTLE_ENDIAN: data.crLfMatcher = new MultiByteLittleCrLfMatcher(); break; default: data.crLfMatcher = new SingleByteCrLfMatcher(); break; } return true; } return false; } }
apache/james-project
35,393
backends-common/cassandra/src/main/java/org/apache/james/backends/cassandra/init/configuration/CassandraConfiguration.java
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.backends.cassandra.init.configuration; import static java.lang.Math.toIntExact; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import org.apache.commons.configuration2.Configuration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; public class CassandraConfiguration { private static final Logger LOGGER = LoggerFactory.getLogger(CassandraConfiguration.class); public static final float DEFAULT_MAILBOX_READ_REPAIR = 0.1f; public static final float DEFAULT_MAX_MAILBOX_COUNTERS_READ_REPAIR_CHANCE = 0.1f; public static final float DEFAULT_ONE_HUNDRED_MAILBOX_COUNTERS_READ_REPAIR_CHANCE = 0.01f; public static final int DEFAULT_EXPUNGE_BATCH_SIZE = 50; public static final int DEFAULT_FLAGS_UPDATE_MESSAGE_MAX_RETRY = 1000; 
public static final int DEFAULT_FLAGS_UPDATE_MESSAGE_ID_MAX_RETRY = 1000; public static final int DEFAULT_MODSEQ_MAX_RETRY = 100000; public static final int DEFAULT_UID_MAX_RETRY = 100000; public static final int DEFAULT_ACL_MAX_RETRY = 1000; public static final int DEFAULT_FETCH_NEXT_PAGE_ADVANCE_IN_ROW = 100; public static final int DEFAULT_BLOB_PART_SIZE = 100 * 1024; public static final int DEFAULT_ATTACHMENT_V2_MIGRATION_READ_TIMEOUT = toIntExact(TimeUnit.HOURS.toMillis(1)); public static final int DEFAULT_MESSAGE_ATTACHMENT_ID_MIGRATION_READ_TIMEOUT = toIntExact(TimeUnit.HOURS.toMillis(1)); public static final String DEFAULT_CONSISTENCY_LEVEL_REGULAR = "QUORUM"; public static final String DEFAULT_CONSISTENCY_LEVEL_LIGHTWEIGHT_TRANSACTION = "SERIAL"; public static final List<String> VALID_CONSISTENCY_LEVEL_REGULAR = ImmutableList.of("QUORUM", "LOCAL_QUORUM", "EACH_QUORUM"); public static final List<String> VALID_CONSISTENCY_LEVEL_LIGHTWEIGHT_TRANSACTION = ImmutableList.of("SERIAL", "LOCAL_SERIAL"); public static final boolean DEFAULT_STRONG_CONSISTENCY = true; public static final boolean DEFAULT_OPTIMISTIC_CONSISTENCY_LEVEL = false; public static final boolean DEFAULT_MAIL_REPOSITORY_STRONG_CONSISTENCY = true; private static final String MAILBOX_READ_REPAIR = "mailbox.read.repair.chance"; private static final String MAILBOX_MAX_COUNTERS_READ_REPAIR = "mailbox.counters.read.repair.chance.max"; private static final String MAILBOX_ONE_HUNDRED_COUNTERS_READ_REPAIR = "mailbox.counters.read.repair.chance.one.hundred"; private static final String MAILBOX_MAX_RETRY_ACL = "mailbox.max.retry.acl"; private static final String MAILBOX_MAX_RETRY_MODSEQ = "mailbox.max.retry.modseq"; private static final String MAILBOX_MAX_RETRY_UID = "mailbox.max.retry.uid"; private static final String MAILBOX_MAX_RETRY_MESSAGE_FLAGS_UPDATE = "mailbox.max.retry.message.flags.update"; private static final String MAILBOX_MAX_RETRY_MESSAGE_ID_FLAGS_UPDATE = 
"mailbox.max.retry.message.id.flags.update"; private static final String FETCH_ADVANCE_ROW_COUNT = "fetch.advance.row.count"; private static final String CHUNK_SIZE_EXPUNGE = "chunk.size.expunge"; private static final String BLOB_PART_SIZE = "mailbox.blob.part.size"; private static final String ATTACHMENT_V2_MIGRATION_READ_TIMEOUT = "attachment.v2.migration.read.timeout"; private static final String MESSAGE_ATTACHMENTID_READ_TIMEOUT = "message.attachmentids.read.timeout"; private static final String MAILBOX_READ_STRONG_CONSISTENCY = "mailbox.read.strong.consistency"; private static final String MESSAGE_READ_STRONG_CONSISTENCY = "message.read.strong.consistency"; private static final String MESSAGE_WRITE_STRONG_CONSISTENCY = "message.write.strong.consistency.unsafe"; private static final String UID_READ_STRONG_CONSISTENCY = "uid.read.strong.consistency.unsafe"; private static final String MODSEQ_READ_STRONG_CONSISTENCY = "modseq.read.strong.consistency.unsafe"; private static final String CONSISTENCY_LEVEL_REGULAR = "cassandra.consistency_level.regular"; private static final String CONSISTENCY_LEVEL_LIGHTWEIGHT_TRANSACTION = "cassandra.consistency_level.lightweight_transaction"; private static final String OPTIMISTIC_CONSISTENCY_LEVEL = "optimistic.consistency.level.enabled"; private static final String MAIL_REPOSITORY_STRONG_CONSISTENCY = "mailrepository.strong.consistency"; private static final String ACL_ENABLED = "acl.enabled"; private static final String UID_MODSEQ_INCREMENT = "uid.modseq.increment"; public static final CassandraConfiguration DEFAULT_CONFIGURATION = builder().build(); public static class Builder { private Optional<Integer> expungeChunkSize = Optional.empty(); private Optional<Integer> flagsUpdateMessageIdMaxRetry = Optional.empty(); private Optional<Integer> flagsUpdateMessageMaxRetry = Optional.empty(); private Optional<Integer> modSeqMaxRetry = Optional.empty(); private Optional<Integer> uidMaxRetry = Optional.empty(); private 
Optional<Integer> aclMaxRetry = Optional.empty(); private Optional<Integer> fetchNextPageInAdvanceRow = Optional.empty(); private Optional<Integer> blobPartSize = Optional.empty(); private Optional<Integer> attachmentV2MigrationReadTimeout = Optional.empty(); private Optional<Integer> messageAttachmentIdsReadTimeout = Optional.empty(); private Optional<String> consistencyLevelRegular = Optional.empty(); private Optional<String> consistencyLevelLightweightTransaction = Optional.empty(); private Optional<Float> mailboxReadRepair = Optional.empty(); private Optional<Float> mailboxCountersReadRepairMax = Optional.empty(); private Optional<Float> mailboxCountersReadRepairChanceOneHundred = Optional.empty(); private Optional<Boolean> mailboxReadStrongConsistency = Optional.empty(); private Optional<Boolean> messageReadStrongConsistency = Optional.empty(); private Optional<Boolean> messageWriteStrongConsistency = Optional.empty(); private Optional<Boolean> uidReadStrongConsistency = Optional.empty(); private Optional<Boolean> modseqReadStrongConsistency = Optional.empty(); private Optional<Boolean> optimisticConsistencyLevel = Optional.empty(); private Optional<Boolean> mailRepositoryStrongConsistency = Optional.empty(); private Optional<Boolean> aclEnabled = Optional.empty(); private Optional<Integer> uidModseqIncrement = Optional.empty(); public Builder mailboxReadStrongConsistency(boolean value) { this.mailboxReadStrongConsistency = Optional.of(value); return this; } public Builder mailboxReadStrongConsistency(Optional<Boolean> value) { this.mailboxReadStrongConsistency = value; return this; } public Builder aclEnabled(Optional<Boolean> value) { this.aclEnabled = value; return this; } public Builder messageReadStrongConsistency(boolean value) { this.messageReadStrongConsistency = Optional.of(value); return this; } public Builder messageReadStrongConsistency(Optional<Boolean> value) { this.messageReadStrongConsistency = value; return this; } public Builder 
// Tail of CassandraConfiguration: remaining Builder setters, build(), the static factory
// builder(), from(Configuration), the value fields, the @VisibleForTesting constructor,
// getters, and equals/hashCode/toString.
// NOTE(review): this span begins mid-method — the "public Builder" modifiers of the first
// setter below sit on an earlier line outside this view, so the surrounding tokens are kept
// verbatim. The single code change is the uidModseqIncrement(int) precondition, marked FIX.
uidReadStrongConsistency(Optional<Boolean> value) { this.uidReadStrongConsistency = value; return this; } public Builder modseqReadStrongConsistency(Optional<Boolean> value) { this.modseqReadStrongConsistency = value; return this; } public Builder messageWriteStrongConsistency(boolean value) { this.messageWriteStrongConsistency = Optional.of(value); return this; } public Builder messageWriteStrongConsistency(Optional<Boolean> value) { this.messageWriteStrongConsistency = value; return this; } public Builder expungeChunkSize(int value) { Preconditions.checkArgument(value > 0, "expungeChunkSize needs to be strictly positive"); this.expungeChunkSize = Optional.of(value); return this; }
// FIX: accept 0 here. build() applies uidModseqIncrement.orElse(0), i.e. 0 is the effective
// default, but the old check (value > 0, "strictly positive") threw when a user explicitly
// configured that default via the uid.modseq.increment property. Relaxed to >= 0 and the
// message adjusted accordingly.
public Builder uidModseqIncrement(int value) { Preconditions.checkArgument(value >= 0, "uidModseqIncrement needs to be positive"); this.uidModseqIncrement = Optional.of(value); return this; } public Builder uidModseqIncrement(Optional<Integer> value) { value.ifPresent(this::uidModseqIncrement); return this; } public Builder flagsUpdateMessageIdMaxRetry(int value) { Preconditions.checkArgument(value > 0, "flagsUpdateMessageIdMaxRetry needs to be strictly positive"); this.flagsUpdateMessageIdMaxRetry = Optional.of(value); return this; } public Builder flagsUpdateMessageMaxRetry(int value) { Preconditions.checkArgument(value > 0, "flagsUpdateMessageMaxRetry needs to be strictly positive"); this.flagsUpdateMessageMaxRetry = Optional.of(value); return this; } public Builder modSeqMaxRetry(int value) { Preconditions.checkArgument(value > 0, "modSeqMaxRetry needs to be strictly positive"); this.modSeqMaxRetry = Optional.of(value); return this; } public Builder uidMaxRetry(int value) { Preconditions.checkArgument(value > 0, "uidMaxRetry needs to be strictly positive"); this.uidMaxRetry = Optional.of(value); return this; } public Builder aclMaxRetry(int value) { Preconditions.checkArgument(value > 0, "aclMaxRetry needs to be strictly positive"); this.aclMaxRetry = Optional.of(value);
// More validated int setters; the float read-repair chances below are constrained to [0, 1].
return this; } public Builder fetchNextPageInAdvanceRow(int value) { Preconditions.checkArgument(value > 0, "fetchNextPageInAdvanceRow needs to be strictly positive"); this.fetchNextPageInAdvanceRow = Optional.of(value); return this; } public Builder blobPartSize(int value) { Preconditions.checkArgument(value > 0, "blobPartSize needs to be strictly positive"); this.blobPartSize = Optional.of(value); return this; } public Builder attachmentV2MigrationReadTimeout(int value) { Preconditions.checkArgument(value > 0, "attachmentV2MigrationReadTimeout needs to be strictly positive"); this.attachmentV2MigrationReadTimeout = Optional.of(value); return this; } public Builder messageAttachmentIdsReadTimeout(int value) { Preconditions.checkArgument(value > 0, "messageAttachmentIdsReadTimeout needs to be strictly positive"); this.messageAttachmentIdsReadTimeout = Optional.of(value); return this; } public Builder mailboxReadRepair(float value) { Preconditions.checkArgument(value >= 0, "mailboxReadRepair needs to be positive"); Preconditions.checkArgument(value <= 1, "mailboxReadRepair needs to be less or equal to 1"); this.mailboxReadRepair = Optional.of(value); return this; } public Builder mailboxCountersReadRepairMax(float value) { Preconditions.checkArgument(value >= 0, "mailboxCountersReadRepairMax needs to be positive"); Preconditions.checkArgument(value <= 1, "mailboxCountersReadRepairMax needs to be less or equal to 1"); this.mailboxCountersReadRepairMax = Optional.of(value); return this; } public Builder mailboxCountersReadRepairChanceOneHundred(float value) { Preconditions.checkArgument(value >= 0, "mailboxCountersReadRepairChanceOneHundred needs to be positive"); Preconditions.checkArgument(value <= 1, "mailboxCountersReadRepairChanceOneHundred needs to be less or equal to 1"); this.mailboxCountersReadRepairChanceOneHundred = Optional.of(value); return this; } public Builder expungeChunkSize(Optional<Integer> value) { value.ifPresent(this::expungeChunkSize); return
// Optional-accepting pass-throughs: each delegates to the validating primitive setter via
// ifPresent, so an empty Optional leaves the builder state untouched.
this; } public Builder flagsUpdateMessageIdMaxRetry(Optional<Integer> value) { value.ifPresent(this::flagsUpdateMessageIdMaxRetry); return this; } public Builder flagsUpdateMessageMaxRetry(Optional<Integer> value) { value.ifPresent(this::flagsUpdateMessageMaxRetry); return this; } public Builder modSeqMaxRetry(Optional<Integer> value) { value.ifPresent(this::modSeqMaxRetry); return this; } public Builder uidMaxRetry(Optional<Integer> value) { value.ifPresent(this::uidMaxRetry); return this; } public Builder aclMaxRetry(Optional<Integer> value) { value.ifPresent(this::aclMaxRetry); return this; } public Builder fetchNextPageInAdvanceRow(Optional<Integer> value) { value.ifPresent(this::fetchNextPageInAdvanceRow); return this; } public Builder blobPartSize(Optional<Integer> value) { value.ifPresent(this::blobPartSize); return this; } public Builder attachmentV2MigrationReadTimeout(Optional<Integer> value) { value.ifPresent(this::attachmentV2MigrationReadTimeout); return this; } public Builder messageAttachmentIdsReadTimeout(Optional<Integer> value) { value.ifPresent(this::messageAttachmentIdsReadTimeout); return this; } public Builder mailboxReadRepair(Optional<Float> value) { value.ifPresent(this::mailboxReadRepair); return this; } public Builder mailboxCountersReadRepairMax(Optional<Float> value) { value.ifPresent(this::mailboxCountersReadRepairMax); return this; } public Builder mailboxCountersReadRepairChanceOneHundred(Optional<Float> value) { value.ifPresent(this::mailboxCountersReadRepairChanceOneHundred); return this; } public Builder consistencyLevelRegular(String value) { Preconditions.checkArgument(VALID_CONSISTENCY_LEVEL_REGULAR.contains(value), "consistencyLevelRegular needs to be one of the following: " + String.join(", ", VALID_CONSISTENCY_LEVEL_REGULAR)); this.consistencyLevelRegular = Optional.of(value); return this; } public Builder consistencyLevelLightweightTransaction(String value) {
// Validated against the allow-list, mirroring consistencyLevelRegular above. build() then
// warns (but does not fail) when exactly one of the two levels is LOCAL_-scoped.
Preconditions.checkArgument(VALID_CONSISTENCY_LEVEL_LIGHTWEIGHT_TRANSACTION.contains(value), "consistencyLevelLightweightTransaction needs to be one of the following: " + String.join(", ", VALID_CONSISTENCY_LEVEL_LIGHTWEIGHT_TRANSACTION)); this.consistencyLevelLightweightTransaction = Optional.of(value); return this; } public Builder consistencyLevelRegular(Optional<String> value) { value.ifPresent(this::consistencyLevelRegular); return this; } public Builder consistencyLevelLightweightTransaction(Optional<String> value) { value.ifPresent(this::consistencyLevelLightweightTransaction); return this; } public Builder optimisticConsistencyLevel(boolean value) { this.optimisticConsistencyLevel = Optional.of(value); return this; } public Builder optimisticConsistencyLevel(Optional<Boolean> value) { this.optimisticConsistencyLevel = value; return this; } public Builder mailRepositoryStrongConsistency(Optional<Boolean> value) { this.mailRepositoryStrongConsistency = value; return this; } public CassandraConfiguration build() { String consistencyLevelRegular = this.consistencyLevelRegular.orElse(DEFAULT_CONSISTENCY_LEVEL_REGULAR); String consistencyLevelLightweightTransaction = this.consistencyLevelLightweightTransaction.orElse(DEFAULT_CONSISTENCY_LEVEL_LIGHTWEIGHT_TRANSACTION); Predicate<String> isLocal = consistencyLevel -> consistencyLevel.startsWith("LOCAL_"); if (isLocal.test(consistencyLevelRegular) != isLocal.test(consistencyLevelLightweightTransaction)) { LOGGER.warn("The consistency levels may not be properly configured, one is local and the other not: " + "regular = '{}' and lightweight transaction = '{}'", consistencyLevelRegular, consistencyLevelLightweightTransaction); } return new CassandraConfiguration(aclMaxRetry.orElse(DEFAULT_ACL_MAX_RETRY), expungeChunkSize.orElse(DEFAULT_EXPUNGE_BATCH_SIZE), flagsUpdateMessageIdMaxRetry.orElse(DEFAULT_FLAGS_UPDATE_MESSAGE_ID_MAX_RETRY), flagsUpdateMessageMaxRetry.orElse(DEFAULT_FLAGS_UPDATE_MESSAGE_MAX_RETRY),
// (constructor argument list continues — every unset option falls back to its DEFAULT_* constant;
// note uidModseqIncrement.orElse(0) at the end, which is why the setter must accept 0)
modSeqMaxRetry.orElse(DEFAULT_MODSEQ_MAX_RETRY), uidMaxRetry.orElse(DEFAULT_UID_MAX_RETRY), fetchNextPageInAdvanceRow.orElse(DEFAULT_FETCH_NEXT_PAGE_ADVANCE_IN_ROW), blobPartSize.orElse(DEFAULT_BLOB_PART_SIZE), attachmentV2MigrationReadTimeout.orElse(DEFAULT_ATTACHMENT_V2_MIGRATION_READ_TIMEOUT), messageAttachmentIdsReadTimeout.orElse(DEFAULT_MESSAGE_ATTACHMENT_ID_MIGRATION_READ_TIMEOUT), consistencyLevelRegular, consistencyLevelLightweightTransaction, mailboxReadRepair.orElse(DEFAULT_MAILBOX_READ_REPAIR), mailboxCountersReadRepairMax.orElse(DEFAULT_MAX_MAILBOX_COUNTERS_READ_REPAIR_CHANCE), mailboxCountersReadRepairChanceOneHundred.orElse(DEFAULT_ONE_HUNDRED_MAILBOX_COUNTERS_READ_REPAIR_CHANCE), mailboxReadStrongConsistency.orElse(DEFAULT_STRONG_CONSISTENCY), messageReadStrongConsistency.orElse(DEFAULT_STRONG_CONSISTENCY), messageWriteStrongConsistency.orElse(DEFAULT_STRONG_CONSISTENCY), optimisticConsistencyLevel.orElse(DEFAULT_OPTIMISTIC_CONSISTENCY_LEVEL), mailRepositoryStrongConsistency.orElse(DEFAULT_MAIL_REPOSITORY_STRONG_CONSISTENCY), uidReadStrongConsistency.orElse(DEFAULT_STRONG_CONSISTENCY), modseqReadStrongConsistency.orElse(DEFAULT_STRONG_CONSISTENCY), aclEnabled.orElse(true), uidModseqIncrement.orElse(0)); } } public static Builder builder() { return new Builder(); } public static CassandraConfiguration from(Configuration propertiesConfiguration) { return builder() .aclMaxRetry(Optional.ofNullable( propertiesConfiguration.getInteger(MAILBOX_MAX_RETRY_ACL, null))) .modSeqMaxRetry(Optional.ofNullable( propertiesConfiguration.getInteger(MAILBOX_MAX_RETRY_MODSEQ, null))) .uidMaxRetry(Optional.ofNullable( propertiesConfiguration.getInteger(MAILBOX_MAX_RETRY_UID, null))) .flagsUpdateMessageMaxRetry(Optional.ofNullable( propertiesConfiguration.getInteger(MAILBOX_MAX_RETRY_MESSAGE_FLAGS_UPDATE, null))) .flagsUpdateMessageIdMaxRetry(Optional.ofNullable( propertiesConfiguration.getInteger(MAILBOX_MAX_RETRY_MESSAGE_ID_FLAGS_UPDATE, null)))
// (from() chain continues — each property is read as nullable and wrapped in Optional so that
// absent keys leave the builder defaults in place)
.fetchNextPageInAdvanceRow(Optional.ofNullable( propertiesConfiguration.getInteger(FETCH_ADVANCE_ROW_COUNT, null))) .expungeChunkSize(Optional.ofNullable( propertiesConfiguration.getInteger(CHUNK_SIZE_EXPUNGE, null))) .blobPartSize(Optional.ofNullable( propertiesConfiguration.getInteger(BLOB_PART_SIZE, null))) .attachmentV2MigrationReadTimeout(Optional.ofNullable( propertiesConfiguration.getInteger(ATTACHMENT_V2_MIGRATION_READ_TIMEOUT, null))) .messageAttachmentIdsReadTimeout(Optional.ofNullable( propertiesConfiguration.getInteger(MESSAGE_ATTACHMENTID_READ_TIMEOUT, null))) .consistencyLevelRegular(Optional.ofNullable( propertiesConfiguration.getString(CONSISTENCY_LEVEL_REGULAR))) .consistencyLevelLightweightTransaction(Optional.ofNullable( propertiesConfiguration.getString(CONSISTENCY_LEVEL_LIGHTWEIGHT_TRANSACTION))) .mailboxReadRepair(Optional.ofNullable( propertiesConfiguration.getFloat(MAILBOX_READ_REPAIR, null))) .mailboxCountersReadRepairMax(Optional.ofNullable( propertiesConfiguration.getFloat(MAILBOX_MAX_COUNTERS_READ_REPAIR, null))) .mailboxCountersReadRepairChanceOneHundred(Optional.ofNullable( propertiesConfiguration.getFloat(MAILBOX_ONE_HUNDRED_COUNTERS_READ_REPAIR, null))) .mailboxReadStrongConsistency(Optional.ofNullable( propertiesConfiguration.getBoolean(MAILBOX_READ_STRONG_CONSISTENCY, null))) .messageReadStrongConsistency(Optional.ofNullable( propertiesConfiguration.getBoolean(MESSAGE_READ_STRONG_CONSISTENCY, null))) .uidReadStrongConsistency(Optional.ofNullable( propertiesConfiguration.getBoolean(UID_READ_STRONG_CONSISTENCY, null))) .modseqReadStrongConsistency(Optional.ofNullable( propertiesConfiguration.getBoolean(MODSEQ_READ_STRONG_CONSISTENCY, null))) .messageWriteStrongConsistency(Optional.ofNullable( propertiesConfiguration.getBoolean(MESSAGE_WRITE_STRONG_CONSISTENCY, null))) .optimisticConsistencyLevel(Optional.ofNullable( propertiesConfiguration.getBoolean(OPTIMISTIC_CONSISTENCY_LEVEL, null)))
// Final chained properties, then the immutable value fields and the assignment-only constructor.
.mailRepositoryStrongConsistency(Optional.ofNullable( propertiesConfiguration.getBoolean(MAIL_REPOSITORY_STRONG_CONSISTENCY, null))) .aclEnabled(Optional.ofNullable(propertiesConfiguration.getBoolean(ACL_ENABLED, null))) .uidModseqIncrement(Optional.ofNullable(propertiesConfiguration.getInteger(UID_MODSEQ_INCREMENT, null))) .build(); } private final int expungeChunkSize; private final int flagsUpdateMessageIdMaxRetry; private final int flagsUpdateMessageMaxRetry; private final int modSeqMaxRetry; private final int uidMaxRetry; private final int aclMaxRetry; private final int fetchNextPageInAdvanceRow; private final int blobPartSize; private final int attachmentV2MigrationReadTimeout; private final int messageAttachmentIdsReadTimeout; private final String consistencyLevelRegular; private final String consistencyLevelLightweightTransaction; private final float mailboxReadRepair; private final float mailboxCountersReadRepairChanceMax; private final float mailboxCountersReadRepairChanceOneHundred; private final boolean mailboxReadStrongConsistency; private final boolean messageReadStrongConsistency; private final boolean messageWriteStrongConsistency; private final boolean optimisticConsistencyLevel; private final boolean mailRepositoryStrongConsistency; private final boolean uidReadStrongConsistency; private final boolean modseqReadStrongConsistency; private final boolean aclEnabled; private final int uidModseqIncrement; @VisibleForTesting CassandraConfiguration(int aclMaxRetry, int expungeChunkSize, int flagsUpdateMessageIdMaxRetry, int flagsUpdateMessageMaxRetry, int modSeqMaxRetry, int uidMaxRetry, int fetchNextPageInAdvanceRow, int blobPartSize, final int attachmentV2MigrationReadTimeout, int messageAttachmentIdsReadTimeout, String consistencyLevelRegular, String consistencyLevelLightweightTransaction, float mailboxReadRepair, float mailboxCountersReadRepairChanceMax, float mailboxCountersReadRepairChanceOneHundred, boolean mailboxReadStrongConsistency, boolean
// (parameter list continues — plain field-assignment constructor, exposed for tests only)
messageReadStrongConsistency, boolean messageWriteStrongConsistency, boolean optimisticConsistencyLevel, boolean mailRepositoryStrongConsistency, boolean uidReadStrongConsistency, boolean modseqReadStrongConsistency, boolean aclEnabled, int uidModseqIncrement) { this.aclMaxRetry = aclMaxRetry; this.expungeChunkSize = expungeChunkSize; this.flagsUpdateMessageIdMaxRetry = flagsUpdateMessageIdMaxRetry; this.flagsUpdateMessageMaxRetry = flagsUpdateMessageMaxRetry; this.modSeqMaxRetry = modSeqMaxRetry; this.uidMaxRetry = uidMaxRetry; this.fetchNextPageInAdvanceRow = fetchNextPageInAdvanceRow; this.blobPartSize = blobPartSize; this.attachmentV2MigrationReadTimeout = attachmentV2MigrationReadTimeout; this.messageAttachmentIdsReadTimeout = messageAttachmentIdsReadTimeout; this.consistencyLevelRegular = consistencyLevelRegular; this.consistencyLevelLightweightTransaction = consistencyLevelLightweightTransaction; this.mailboxReadRepair = mailboxReadRepair; this.mailboxCountersReadRepairChanceMax = mailboxCountersReadRepairChanceMax; this.mailboxCountersReadRepairChanceOneHundred = mailboxCountersReadRepairChanceOneHundred; this.mailboxReadStrongConsistency = mailboxReadStrongConsistency; this.messageReadStrongConsistency = messageReadStrongConsistency; this.messageWriteStrongConsistency = messageWriteStrongConsistency; this.optimisticConsistencyLevel = optimisticConsistencyLevel; this.mailRepositoryStrongConsistency = mailRepositoryStrongConsistency; this.uidReadStrongConsistency = uidReadStrongConsistency; this.modseqReadStrongConsistency = modseqReadStrongConsistency; this.aclEnabled = aclEnabled; this.uidModseqIncrement = uidModseqIncrement; } public boolean isUidReadStrongConsistency() { return uidReadStrongConsistency; } public boolean isModseqReadStrongConsistency() { return modseqReadStrongConsistency; } public boolean isMailboxReadStrongConsistency() { return mailboxReadStrongConsistency; } public boolean isMessageWriteStrongConsistency() { return
// Simple getters for every configured value.
messageWriteStrongConsistency; } public boolean isMessageReadStrongConsistency() { return messageReadStrongConsistency; } public float getMailboxReadRepair() { return mailboxReadRepair; } public int getBlobPartSize() { return blobPartSize; } public int getAclMaxRetry() { return aclMaxRetry; } public int getExpungeChunkSize() { return expungeChunkSize; } public int getFlagsUpdateMessageIdMaxRetry() { return flagsUpdateMessageIdMaxRetry; } public int getFlagsUpdateMessageMaxRetry() { return flagsUpdateMessageMaxRetry; } public int getModSeqMaxRetry() { return modSeqMaxRetry; } public int getUidMaxRetry() { return uidMaxRetry; } public int getFetchNextPageInAdvanceRow() { return fetchNextPageInAdvanceRow; } public int getAttachmentV2MigrationReadTimeout() { return attachmentV2MigrationReadTimeout; } public int getMessageAttachmentIdsReadTimeout() { return messageAttachmentIdsReadTimeout; } public String getConsistencyLevelRegular() { return consistencyLevelRegular; } public String getConsistencyLevelLightweightTransaction() { return consistencyLevelLightweightTransaction; } public float getMailboxCountersReadRepairChanceMax() { return mailboxCountersReadRepairChanceMax; } public float getMailboxCountersReadRepairChanceOneHundred() { return mailboxCountersReadRepairChanceOneHundred; } public boolean isOptimisticConsistencyLevel() { return optimisticConsistencyLevel; } public boolean isMailRepositoryStrongConsistency() { return mailRepositoryStrongConsistency; } public boolean isAclEnabled() { return aclEnabled; } public int getUidModseqIncrement() { return uidModseqIncrement; } @Override public final boolean equals(Object o) { if (o instanceof CassandraConfiguration) { CassandraConfiguration that = (CassandraConfiguration) o; return Objects.equals(this.aclMaxRetry, that.aclMaxRetry) && Objects.equals(this.expungeChunkSize, that.expungeChunkSize) && Objects.equals(this.flagsUpdateMessageIdMaxRetry, that.flagsUpdateMessageIdMaxRetry) &&
// (equals continues — all 24 fields participate; autoboxing makes Objects.equals safe on primitives)
Objects.equals(this.flagsUpdateMessageMaxRetry, that.flagsUpdateMessageMaxRetry) && Objects.equals(this.modSeqMaxRetry, that.modSeqMaxRetry) && Objects.equals(this.uidMaxRetry, that.uidMaxRetry) && Objects.equals(this.mailboxReadRepair, that.mailboxReadRepair) && Objects.equals(this.mailboxCountersReadRepairChanceMax, that.mailboxCountersReadRepairChanceMax) && Objects.equals(this.mailboxCountersReadRepairChanceOneHundred, that.mailboxCountersReadRepairChanceOneHundred) && Objects.equals(this.fetchNextPageInAdvanceRow, that.fetchNextPageInAdvanceRow) && Objects.equals(this.blobPartSize, that.blobPartSize) && Objects.equals(this.attachmentV2MigrationReadTimeout, that.attachmentV2MigrationReadTimeout) && Objects.equals(this.mailboxReadStrongConsistency, that.mailboxReadStrongConsistency) && Objects.equals(this.messageAttachmentIdsReadTimeout, that.messageAttachmentIdsReadTimeout) && Objects.equals(this.messageReadStrongConsistency, that.messageReadStrongConsistency) && Objects.equals(this.messageWriteStrongConsistency, that.messageWriteStrongConsistency) && Objects.equals(this.consistencyLevelRegular, that.consistencyLevelRegular) && Objects.equals(this.consistencyLevelLightweightTransaction, that.consistencyLevelLightweightTransaction) && Objects.equals(this.optimisticConsistencyLevel, that.optimisticConsistencyLevel) && Objects.equals(this.uidReadStrongConsistency, that.uidReadStrongConsistency) && Objects.equals(this.modseqReadStrongConsistency, that.modseqReadStrongConsistency) && Objects.equals(this.mailRepositoryStrongConsistency, that.mailRepositoryStrongConsistency) && Objects.equals(this.aclEnabled, that.aclEnabled) && Objects.equals(this.uidModseqIncrement, that.uidModseqIncrement); } return false; } @Override public final int hashCode() { return Objects.hash(aclMaxRetry, expungeChunkSize, flagsUpdateMessageIdMaxRetry, flagsUpdateMessageMaxRetry, modSeqMaxRetry, uidMaxRetry, fetchNextPageInAdvanceRow, mailboxCountersReadRepairChanceOneHundred,
// (hashCode covers the same 24 fields as equals, as the equals/hashCode contract requires)
mailboxCountersReadRepairChanceMax, blobPartSize, attachmentV2MigrationReadTimeout, messageAttachmentIdsReadTimeout, consistencyLevelRegular, consistencyLevelLightweightTransaction, mailboxReadRepair, messageReadStrongConsistency, mailboxReadStrongConsistency, messageWriteStrongConsistency, optimisticConsistencyLevel, mailRepositoryStrongConsistency, uidReadStrongConsistency, modseqReadStrongConsistency, aclEnabled, uidModseqIncrement); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("aclMaxRetry", aclMaxRetry) .add("expungeChunkSize", expungeChunkSize) .add("flagsUpdateMessageIdMaxRetry", flagsUpdateMessageIdMaxRetry) .add("flagsUpdateMessageMaxRetry", flagsUpdateMessageMaxRetry) .add("modSeqMaxRetry", modSeqMaxRetry) .add("fetchNextPageInAdvanceRow", fetchNextPageInAdvanceRow) .add("mailboxReadRepair", mailboxReadRepair) .add("mailboxCountersReadRepairChanceOneHundred", mailboxCountersReadRepairChanceOneHundred) .add("mailboxCountersReadRepairChanceMax", mailboxCountersReadRepairChanceMax) .add("uidMaxRetry", uidMaxRetry) .add("blobPartSize", blobPartSize) .add("attachmentV2MigrationReadTimeout", attachmentV2MigrationReadTimeout) .add("messageAttachmentIdsReadTimeout", messageAttachmentIdsReadTimeout) .add("messageReadStrongConsistency", messageReadStrongConsistency) .add("messageWriteStrongConsistency", messageWriteStrongConsistency) .add("mailboxReadStrongConsistency", mailboxReadStrongConsistency) .add("consistencyLevelRegular", consistencyLevelRegular) .add("consistencyLevelLightweightTransaction", consistencyLevelLightweightTransaction) .add("optimisticConsistencyLevel", optimisticConsistencyLevel) .add("mailRepositoryStrongConsistency", mailRepositoryStrongConsistency) .add("modseqReadStrongConsistency", modseqReadStrongConsistency) .add("uidReadStrongConsistency", uidReadStrongConsistency) .add("aclEnabled", aclEnabled) .add("uidModseqIncrement", uidModseqIncrement) .toString(); } }
apache/kafka
34,567
tools/src/test/java/org/apache/kafka/tools/ProducerPerformanceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.tools; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.errors.AuthorizationException; import org.apache.kafka.common.utils.Utils; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.ArgumentParserException; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.Spy; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.ArrayList; import java.util.List; import java.util.Properties; import java.util.SplittableRandom; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static 
org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.STRICT_STUBS) public class ProducerPerformanceTest { @Mock KafkaProducer<byte[], byte[]> producerMock; @Spy ProducerPerformance producerPerformanceSpy; private File createTempFile(String contents) throws IOException { File file = File.createTempFile("ProducerPerformanceTest", ".tmp"); file.deleteOnExit(); Files.write(file.toPath(), contents.getBytes()); return file; } @Test public void testReadPayloadFile() throws Exception { File payloadFile = createTempFile("Hello\nKafka"); String payloadFilePath = payloadFile.getAbsolutePath(); String payloadDelimiter = "\n"; List<byte[]> payloadByteList = ProducerPerformance.readPayloadFile(payloadFilePath, payloadDelimiter); assertEquals(2, payloadByteList.size()); assertEquals("Hello", new String(payloadByteList.get(0))); assertEquals("Kafka", new String(payloadByteList.get(1))); Utils.delete(payloadFile); } @Test public void testReadProps() throws Exception { List<String> producerProps = List.of("bootstrap.servers=localhost:9000"); File producerConfig = createTempFile("acks=1"); Properties prop = ProducerPerformance.readProps(producerProps, producerConfig.getAbsolutePath()); assertNotNull(prop); assertEquals(5, prop.size()); Utils.delete(producerConfig); } @Test public void testReadPayloadFileWithAlternateDelimiters() throws 
Exception { List<byte[]> payloadByteList; payloadByteList = generateListFromFileUsingDelimiter("Hello~~Kafka", "~~"); assertEquals(2, payloadByteList.size()); assertEquals("Hello", new String(payloadByteList.get(0))); assertEquals("Kafka", new String(payloadByteList.get(1))); payloadByteList = generateListFromFileUsingDelimiter("Hello,Kafka,", ","); assertEquals(2, payloadByteList.size()); assertEquals("Hello", new String(payloadByteList.get(0))); assertEquals("Kafka", new String(payloadByteList.get(1))); payloadByteList = generateListFromFileUsingDelimiter("Hello\t\tKafka", "\t"); assertEquals(3, payloadByteList.size()); assertEquals("Hello", new String(payloadByteList.get(0))); assertEquals("Kafka", new String(payloadByteList.get(2))); payloadByteList = generateListFromFileUsingDelimiter("Hello\n\nKafka\n", "\n"); assertEquals(3, payloadByteList.size()); assertEquals("Hello", new String(payloadByteList.get(0))); assertEquals("Kafka", new String(payloadByteList.get(2))); payloadByteList = generateListFromFileUsingDelimiter("Hello::Kafka::World", "\\s*::\\s*"); assertEquals(3, payloadByteList.size()); assertEquals("Hello", new String(payloadByteList.get(0))); assertEquals("Kafka", new String(payloadByteList.get(1))); } @Test public void testCompareStringSplitWithScannerDelimiter() throws Exception { String contents = "Hello~~Kafka"; String payloadDelimiter = "~~"; compareList(generateListFromFileUsingDelimiter(contents, payloadDelimiter), contents.split(payloadDelimiter)); contents = "Hello,Kafka,"; payloadDelimiter = ","; compareList(generateListFromFileUsingDelimiter(contents, payloadDelimiter), contents.split(payloadDelimiter)); contents = "Hello\t\tKafka"; payloadDelimiter = "\t"; compareList(generateListFromFileUsingDelimiter(contents, payloadDelimiter), contents.split(payloadDelimiter)); contents = "Hello\n\nKafka\n"; payloadDelimiter = "\n"; compareList(generateListFromFileUsingDelimiter(contents, payloadDelimiter), contents.split(payloadDelimiter)); 
contents = "Hello::Kafka::World"; payloadDelimiter = "\\s*::\\s*"; compareList(generateListFromFileUsingDelimiter(contents, payloadDelimiter), contents.split(payloadDelimiter)); } private void compareList(List<byte[]> payloadByteList, String[] payloadByteListFromSplit) { assertEquals(payloadByteListFromSplit.length, payloadByteList.size()); for (int i = 0; i < payloadByteListFromSplit.length; i++) { assertEquals(payloadByteListFromSplit[i], new String(payloadByteList.get(i))); } } private List<byte[]> generateListFromFileUsingDelimiter(String fileContent, String payloadDelimiter) throws Exception { File payloadFile = null; List<byte[]> payloadByteList; try { payloadFile = createTempFile(fileContent); payloadByteList = ProducerPerformance.readPayloadFile(payloadFile.getAbsolutePath(), payloadDelimiter); } finally { Utils.delete(payloadFile); } return payloadByteList; } @Test public void testNumberOfCallsForSendAndClose() throws IOException { doReturn(null).when(producerMock).send(any(), any()); doReturn(producerMock).when(producerPerformanceSpy).createKafkaProducer(any(Properties.class)); String[] args = new String[] { "--topic", "Hello-Kafka", "--num-records", "5", "--throughput", "100", "--record-size", "100", "--bootstrap-server", "localhost:9000"}; producerPerformanceSpy.start(args); verify(producerMock, times(5)).send(any(), any()); verify(producerMock, times(1)).close(); } @Test public void testEnableTransaction() throws IOException { doReturn(null).when(producerMock).send(any(), any()); doReturn(producerMock).when(producerPerformanceSpy).createKafkaProducer(any(Properties.class)); String[] args = new String[] { "--topic", "Hello-Kafka", "--num-records", "5", "--throughput", "100", "--record-size", "100", "--transactional-id", "foobar", "--bootstrap-server", "localhost:9000"}; producerPerformanceSpy.start(args); verify(producerMock, times(1)).beginTransaction(); verify(producerMock, times(1)).commitTransaction(); verify(producerMock, times(1)).close(); } @Test 
// Each acknowledged send (callback completed without error) is counted in
// stats: 10 records -> 10 sends, a total count of 10, and one close.
public void testNumberOfSuccessfulSendAndClose() throws IOException {
        doReturn(producerMock).when(producerPerformanceSpy).createKafkaProducer(any(Properties.class));
        doAnswer(invocation -> {
            // Simulate a successful ack for every send.
            producerPerformanceSpy.cb.onCompletion(null, null);
            return null;
        }).when(producerMock).send(any(), any());

        String[] args = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "10",
            "--throughput", "1",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        producerPerformanceSpy.start(args);

        verify(producerMock, times(10)).send(any(), any());
        assertEquals(10, producerPerformanceSpy.stats.totalCount());
        verify(producerMock, times(1)).close();
    }

    // Sends whose callback reports an exception must not be counted in either
    // the current window or the total.
    @Test
    public void testNumberOfFailedSendAndClose() throws IOException {
        doReturn(producerMock).when(producerPerformanceSpy).createKafkaProducer(any(Properties.class));
        doAnswer(invocation -> {
            // Simulate every send failing authorization.
            producerPerformanceSpy.cb.onCompletion(null, new AuthorizationException("not authorized."));
            return null;
        }).when(producerMock).send(any(), any());

        String[] args = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "10",
            "--throughput", "1",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        producerPerformanceSpy.start(args);

        verify(producerMock, times(10)).send(any(), any());
        assertEquals(0, producerPerformanceSpy.stats.currentWindowCount());
        assertEquals(0, producerPerformanceSpy.stats.totalCount());
        verify(producerMock, times(1)).close();
    }

    // --payload-monotonic is mutually exclusive with --record-size and with
    // --payload-file; the parser must reject each combination with a specific
    // message.
    @Test
    public void testMutuallyExclusiveGroup() {
        String[] args1 = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--payload-monotonic",
            "--bootstrap-server", "localhost:9000"};
        ArgumentParser parser1 = ProducerPerformance.argParser();
        ArgumentParserException thrown = assertThrows(ArgumentParserException.class, () -> parser1.parseArgs(args1));
        assertEquals("argument --payload-monotonic: not allowed with argument --record-size", thrown.getMessage());

        String[] args2 = new String[] {
            "--topic", "Hello-Kafka",
"--num-records", "5",
            "--throughput", "100",
            "--payload-file", "abc.txt",
            "--payload-monotonic",
            "--bootstrap-server", "localhost:9000"};
        ArgumentParser parser2 = ProducerPerformance.argParser();
        thrown = assertThrows(ArgumentParserException.class, () -> parser2.parseArgs(args2));
        assertEquals("argument --payload-monotonic: not allowed with argument --payload-file", thrown.getMessage());
    }

    // Unknown options must be rejected with an "unrecognized arguments" error.
    @Test
    public void testUnexpectedArg() {
        String[] args = new String[] {
            "--test", "test",
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        ArgumentParser parser = ProducerPerformance.argParser();
        ArgumentParserException thrown = assertThrows(ArgumentParserException.class, () -> parser.parseArgs(args));
        assertEquals("unrecognized arguments: '--test'", thrown.getMessage());
    }

    // --throughput accepts fractional values.
    @Test
    public void testFractionalThroughput() {
        String[] args = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "1.25",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        ArgumentParser parser = ProducerPerformance.argParser();
        assertDoesNotThrow(() -> parser.parseArgs(args));
    }

    // With a payload list supplied (and no record size), generateRandomPayload
    // must return a payload drawn from the list.
    @Test
    public void testGenerateRandomPayloadByPayloadFile() {
        Integer recordSize = null;
        String inputString = "Hello Kafka";
        byte[] byteArray = inputString.getBytes(StandardCharsets.UTF_8);
        List<byte[]> payloadByteList = new ArrayList<>();
        payloadByteList.add(byteArray);
        byte[] payload = null;
        SplittableRandom random = new SplittableRandom(0);

        payload = ProducerPerformance.generateRandomPayload(recordSize, payloadByteList, payload, random, false, 0L);

        assertEquals(inputString, new String(payload));
    }

    // With a record size supplied, every byte of the generated payload must be
    // filled in (no byte left at its zero default).
    @Test
    public void testGenerateRandomPayloadByRecordSize() {
        Integer recordSize = 100;
        byte[] payload = new byte[recordSize];
        List<byte[]> payloadByteList = new ArrayList<>();
        SplittableRandom random = new SplittableRandom(0);
        payload = ProducerPerformance.generateRandomPayload(recordSize, payloadByteList,
payload, random, false, 0L);
        for (byte b : payload) {
            assertNotEquals(0, b);
        }
    }

    // In monotonic mode the payload is the decimal text of the record index;
    // for indices 0..9 that is a single ASCII digit.
    @Test
    public void testGenerateMonotonicPayload() {
        byte[] payload = null;
        List<byte[]> payloadByteList = new ArrayList<>();
        SplittableRandom random = new SplittableRandom(0);
        for (int i = 0; i < 10; i++) {
            payload = ProducerPerformance.generateRandomPayload(null, payloadByteList, payload, random, true, i);
            assertEquals(1, payload.length);
            assertEquals(i + '0', payload[0]);
        }
    }

    // With no payload file, record size, or monotonic flag, the generator must
    // fail with a descriptive IllegalArgumentException.
    @Test
    public void testGenerateRandomPayloadException() {
        Integer recordSize = null;
        byte[] payload = null;
        List<byte[]> payloadByteList = new ArrayList<>();
        SplittableRandom random = new SplittableRandom(0);
        IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> ProducerPerformance.generateRandomPayload(recordSize, payloadByteList, payload, random, false, 0L));
        assertEquals("No payload file, record size or payload-monotonic option provided.", thrown.getMessage());
    }

    // An explicit client.id in the producer properties wins over the default.
    @Test
    public void testClientIdOverride() throws Exception {
        List<String> producerProps = List.of("client.id=producer-1");

        Properties prop = ProducerPerformance.readProps(producerProps, null);

        assertNotNull(prop);
        assertEquals("producer-1", prop.getProperty("client.id"));
    }

    // When no client.id is supplied, the default "perf-producer-client" is used.
    @Test
    public void testDefaultClientId() throws Exception {
        List<String> producerProps = List.of("acks=1");

        Properties prop = ProducerPerformance.readProps(producerProps, null);

        assertNotNull(prop);
        assertEquals("perf-producer-client", prop.getProperty("client.id"));
    }

    // Stats construction must not fail even for Long.MAX_VALUE records.
    @Test
    public void testStatsInitializationWithLargeNumRecords() {
        long numRecords = Long.MAX_VALUE;

        assertDoesNotThrow(() -> new ProducerPerformance.Stats(numRecords, 5000L, false));
    }

    // Drives one million callback completions through a single-threaded
    // executor, then checks every Stats counter.
    @Test
    public void testStatsCorrectness() throws Exception {
        ExecutorService singleThreaded = Executors.newSingleThreadExecutor();
        final long numRecords = 1000000;
        ProducerPerformance.Stats stats = new ProducerPerformance.Stats(numRecords, 5000L, false);
        for (long i = 0; i < numRecords; i++) {
            final Callback
callback = new ProducerPerformance.PerfCallback(0, 100, stats, null);
            CompletableFuture.runAsync(() -> callback.onCompletion(null, null), singleThreaded);
        }

        singleThreaded.shutdown();
        final boolean success = singleThreaded.awaitTermination(60, TimeUnit.SECONDS);

        assertTrue(success, "should have terminated");
        assertEquals(numRecords, stats.totalCount());
        assertEquals(numRecords, stats.iteration());
        // NOTE(review): 500000 presumably reflects the latency sampling-index
        // wrap inside Stats — confirm against the Stats implementation.
        assertEquals(500000, stats.index());
        // Each of the 1000000 records carried 100 bytes.
        assertEquals(1000000 * 100, stats.bytes());
    }

    // A full argument set must flow into the ConfigPostProcessor fields
    // verbatim, including transaction settings and metric printing.
    @Test
    public void testConfigPostProcessor() throws IOException, ArgumentParserException {
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--print-metrics",
            "--bootstrap-server", "localhost:9000",
            "--transactional-id", "foobar",
            "--transaction-duration-ms", "5000",
        };
        ProducerPerformance.ConfigPostProcessor configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertEquals("localhost:9000", configs.bootstrapServers);
        assertEquals("Hello-Kafka", configs.topicName);
        assertEquals(5, configs.numRecords);
        assertEquals(100, configs.throughput);
        assertEquals(100, configs.recordSize);
        assertFalse(configs.payloadMonotonic);
        assertTrue(configs.shouldPrintMetrics);
        assertTrue(configs.payloadByteList.isEmpty());
        Properties props = configs.producerProps;
        assertEquals(5, props.size());
        assertTrue(configs.transactionsEnabled);
        assertEquals(5000, configs.transactionDurationMs);
    }

    // Each invalid option combination must surface its specific error message.
    @Test
    public void testInvalidConfigPostProcessor() {
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] invalidProducerProps = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100"};
        assertEquals("At least one of --bootstrap-server, --command-property, --producer-props, --producer.config or --command-config must be specified.",
            assertThrows(ArgumentParserException.class, () -> new
ProducerPerformance.ConfigPostProcessor(parser, invalidProducerProps)).getMessage());

        // Zero transaction duration is rejected.
        String[] invalidTransactionDurationMs = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000",
            "--transaction-duration-ms", "0"};
        assertEquals("--transaction-duration-ms should be greater than zero.",
            assertThrows(ArgumentParserException.class, () -> new ProducerPerformance.ConfigPostProcessor(parser, invalidTransactionDurationMs)).getMessage());

        // Negative record count is rejected.
        String[] invalidNumRecords = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "-5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        assertEquals("--num-records should be greater than zero.",
            assertThrows(ArgumentParserException.class, () -> new ProducerPerformance.ConfigPostProcessor(parser, invalidNumRecords)).getMessage());

        // Negative record size is rejected.
        String[] invalidRecordSize = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "-100",
            "--bootstrap-server", "localhost:9000"};
        assertEquals("--record-size should be greater than zero.",
            assertThrows(ArgumentParserException.class, () -> new ProducerPerformance.ConfigPostProcessor(parser, invalidRecordSize)).getMessage());

        // Zero reporting interval is rejected.
        String[] invalidReportingInterval = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--reporting-interval", "0",
            "--bootstrap-server", "localhost:9000"};
        assertEquals("--reporting-interval should be greater than zero.",
            assertThrows(ArgumentParserException.class, () -> new ProducerPerformance.ConfigPostProcessor(parser, invalidReportingInterval)).getMessage());
    }

    // bootstrap.servers can arrive via --bootstrap-server, --command-property,
    // or --producer-props; the dedicated flag takes precedence over the
    // property-based forms.
    @Test
    public void testBootstrapServer() throws IOException, ArgumentParserException {
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server",
"localhost:9000"};
        ProducerPerformance.ConfigPostProcessor configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertEquals("localhost:9000", configs.producerProps.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG));

        // bootstrap.servers via --command-property alone.
        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--command-property", "bootstrap.servers=localhost:9001"};
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertEquals("localhost:9001", configs.producerProps.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG));

        // --bootstrap-server overrides --command-property.
        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000",
            "--command-property", "bootstrap.servers=localhost:9001"};
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertEquals("localhost:9000", configs.producerProps.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG));

        // bootstrap.servers via --producer-props alone.
        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--producer-props", "bootstrap.servers=localhost:9001"};
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertEquals("localhost:9001", configs.producerProps.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG));

        // --bootstrap-server also overrides --producer-props.
        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000",
            "--producer-props", "bootstrap.servers=localhost:9001"};
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertEquals("localhost:9000", configs.producerProps.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG));
    }

    // Without any transaction-related option, transactions stay disabled and no
    // transactional id is configured.
    @Test
    public void testNoTransactionRelatedConfigs() throws IOException, ArgumentParserException {
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server",
"localhost:9000"};
        ProducerPerformance.ConfigPostProcessor configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertFalse(configs.transactionsEnabled);
        assertNull(configs.transactionDurationMs);
        assertFalse(configs.producerProps.contains(ProducerConfig.TRANSACTIONAL_ID_CONFIG));
    }

    // transactional.id supplied as a producer property enables transactions
    // with the default transaction duration.
    @Test
    public void testEnableTransactionByProducerProperty() throws IOException, ArgumentParserException {
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000",
            "--command-property", "transactional.id=foobar"};
        ProducerPerformance.ConfigPostProcessor configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(ProducerPerformance.DEFAULT_TRANSACTION_DURATION_MS, configs.transactionDurationMs);
        assertEquals("foobar", configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));
    }

    // transactional.id from a --command-config file enables transactions; the
    // remainder of this test exercises the source-precedence chain.
    @Test
    public void testEnableTransactionByTransactionId() throws IOException, ArgumentParserException {
        File producerConfigFile = createTempFile("transactional.id=foobar");
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000",
            "--command-config", producerConfigFile.getAbsolutePath()};
        ProducerPerformance.ConfigPostProcessor configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(ProducerPerformance.DEFAULT_TRANSACTION_DURATION_MS, configs.transactionDurationMs);
        assertEquals("foobar", configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));

        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000",
            "--command-config",
producerConfigFile.getAbsolutePath(),
            "--command-property", "transactional.id=hello_kafka"};
        // --command-property overrides the value from the config file.
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(ProducerPerformance.DEFAULT_TRANSACTION_DURATION_MS, configs.transactionDurationMs);
        assertEquals("hello_kafka", configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));

        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--transactional-id", "kafka_hello",
            "--bootstrap-server", "localhost:9000",
            "--command-config", producerConfigFile.getAbsolutePath(),
            "--command-property", "transactional.id=hello_kafka"};
        // The dedicated --transactional-id flag wins over both other sources.
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(ProducerPerformance.DEFAULT_TRANSACTION_DURATION_MS, configs.transactionDurationMs);
        assertEquals("kafka_hello", configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));
        Utils.delete(producerConfigFile);
    }

    // Same transactional-id precedence checks, driven through the deprecated
    // --producer.config / --producer-props argument names.
    @Test
    public void testEnableTransactionByTransactionIdDeprecated() throws IOException, ArgumentParserException {
        File producerConfigFile = createTempFile("transactional.id=foobar");
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000",
            "--producer.config", producerConfigFile.getAbsolutePath()};
        ProducerPerformance.ConfigPostProcessor configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(ProducerPerformance.DEFAULT_TRANSACTION_DURATION_MS, configs.transactionDurationMs);
        assertEquals("foobar", configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));

        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server",
"localhost:9000",
            "--producer.config", producerConfigFile.getAbsolutePath(),
            "--producer-props", "transactional.id=hello_kafka"};
        // --producer-props overrides the value from the config file.
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(ProducerPerformance.DEFAULT_TRANSACTION_DURATION_MS, configs.transactionDurationMs);
        assertEquals("hello_kafka", configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));

        args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--transactional-id", "kafka_hello",
            "--bootstrap-server", "localhost:9000",
            "--producer.config", producerConfigFile.getAbsolutePath(),
            "--producer-props", "transactional.id=hello_kafka"};
        // The dedicated --transactional-id flag wins over both other sources.
        configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(ProducerPerformance.DEFAULT_TRANSACTION_DURATION_MS, configs.transactionDurationMs);
        assertEquals("kafka_hello", configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));
        Utils.delete(producerConfigFile);
    }

    // Mixing a deprecated argument with its modern replacement must be rejected.
    @Test
    public void testEnsureDeprecatedAndModernArgumentsNotBothSpecified() throws IOException {
        File producerConfigFile = createTempFile("bootstrap.servers=localhost:9000");
        // --producer.config together with --command-config.
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--producer.config", producerConfigFile.getAbsolutePath(),
            "--command-config", producerConfigFile.getAbsolutePath()};
        ArgumentParser parser = ProducerPerformance.argParser();
        assertThrows(ArgumentParserException.class, () -> new ProducerPerformance.ConfigPostProcessor(parser, args));

        // --producer-props together with --command-property.
        String[] args2 = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--producer-props", "bootstrap.servers=localhost:9090",
            "--command-property", "bootstrap.servers=localhost:9090"};
        assertThrows(ArgumentParserException.class, () -> new
ProducerPerformance.ConfigPostProcessor(parser, args2));
        Utils.delete(producerConfigFile);
    }

    // --transaction-duration-ms alone enables transactions, with a generated
    // transactional id carrying the default prefix.
    @Test
    public void testEnableTransactionByTransactionDurationMs() throws IOException, ArgumentParserException {
        ArgumentParser parser = ProducerPerformance.argParser();
        String[] args = new String[]{
            "--topic", "Hello-Kafka",
            "--num-records", "5",
            "--throughput", "100",
            "--record-size", "100",
            "--transaction-duration-ms", "5000",
            "--bootstrap-server", "localhost:9000"};
        ProducerPerformance.ConfigPostProcessor configs = new ProducerPerformance.ConfigPostProcessor(parser, args);
        assertTrue(configs.transactionsEnabled);
        assertEquals(5000, configs.transactionDurationMs);
        assertTrue(configs.producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG).toString()
            .startsWith(ProducerPerformance.DEFAULT_TRANSACTION_ID_PREFIX));
    }

    // --warmup-records must be an integer: fractional values are rejected.
    @Test
    public void testWarmupRecordsFractionalValue() {
        String[] args = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "10",
            "--warmup-records", "1.5",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        ArgumentParser parser = ProducerPerformance.argParser();
        ArgumentParserException thrown = assertThrows(ArgumentParserException.class, () -> parser.parseArgs(args));
        thrown.printStackTrace();
    }

    // --warmup-records must be an integer: non-numeric values are rejected.
    @Test
    public void testWarmupRecordsString() {
        String[] args = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "10",
            "--warmup-records", "foo",
            "--throughput", "100",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        ArgumentParser parser = ProducerPerformance.argParser();
        ArgumentParserException thrown = assertThrows(ArgumentParserException.class, () -> parser.parseArgs(args));
        thrown.printStackTrace();
    }

    // With warmup records configured, the steady-state stats must exclude the
    // warmup sends while the overall stats count everything.
    @Test
    public void testWarmupNumberOfSuccessfulSendAndClose() throws IOException {
        doReturn(producerMock).when(producerPerformanceSpy).createKafkaProducer(any(Properties.class));
        doAnswer(invocation -> {
            producerPerformanceSpy.cb.onCompletion(null, null);
            return null;
        }).when(producerMock).send(any(),
any());

        String[] args = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "10",
            "--warmup-records", "2",
            "--throughput", "1",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        producerPerformanceSpy.start(args);

        verify(producerMock, times(10)).send(any(), any());
        assertEquals(10, producerPerformanceSpy.stats.totalCount());
        // 2 of the 10 records were warmup, so the steady-state window sees 8.
        assertEquals(10 - 2, producerPerformanceSpy.steadyStateStats.totalCount());
        verify(producerMock, times(1)).close();
    }

    // A negative --warmup-records value disables warmup: the run behaves like a
    // plain successful-send run.
    @Test
    public void testWarmupNegativeRecordsNormalTest() throws IOException {
        doReturn(producerMock).when(producerPerformanceSpy).createKafkaProducer(any(Properties.class));
        doAnswer(invocation -> {
            producerPerformanceSpy.cb.onCompletion(null, null);
            return null;
        }).when(producerMock).send(any(), any());

        String[] args = new String[] {
            "--topic", "Hello-Kafka",
            "--num-records", "10",
            "--warmup-records", "-1",
            "--throughput", "1",
            "--record-size", "100",
            "--bootstrap-server", "localhost:9000"};
        producerPerformanceSpy.start(args);

        verify(producerMock, times(10)).send(any(), any());
        assertEquals(10, producerPerformanceSpy.stats.totalCount());
        verify(producerMock, times(1)).close();
    }
}
googleapis/google-cloud-java
35,043
java-developerconnect/proto-google-cloud-developerconnect-v1/src/main/java/com/google/cloud/developerconnect/v1/FetchReadTokenResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/developerconnect/v1/developer_connect.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.developerconnect.v1; /** * * * <pre> * Message for responding to get read token. * </pre> * * Protobuf type {@code google.cloud.developerconnect.v1.FetchReadTokenResponse} */ public final class FetchReadTokenResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.developerconnect.v1.FetchReadTokenResponse) FetchReadTokenResponseOrBuilder { private static final long serialVersionUID = 0L; // Use FetchReadTokenResponse.newBuilder() to construct. 
private FetchReadTokenResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FetchReadTokenResponse() { token_ = ""; gitUsername_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FetchReadTokenResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadTokenResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadTokenResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.developerconnect.v1.FetchReadTokenResponse.class, com.google.cloud.developerconnect.v1.FetchReadTokenResponse.Builder.class); } private int bitField0_; public static final int TOKEN_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object token_ = ""; /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The token. */ @java.lang.Override public java.lang.String getToken() { java.lang.Object ref = token_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); token_ = s; return s; } } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The bytes for token. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTokenBytes() { java.lang.Object ref = token_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); token_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EXPIRATION_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp expirationTime_; /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return Whether the expirationTime field is set. */ @java.lang.Override public boolean hasExpirationTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return The expirationTime. */ @java.lang.Override public com.google.protobuf.Timestamp getExpirationTime() { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } public static final int GIT_USERNAME_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object gitUsername_ = ""; /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The gitUsername. 
*/ @java.lang.Override public java.lang.String getGitUsername() { java.lang.Object ref = gitUsername_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gitUsername_ = s; return s; } } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The bytes for gitUsername. */ @java.lang.Override public com.google.protobuf.ByteString getGitUsernameBytes() { java.lang.Object ref = gitUsername_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gitUsername_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(token_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, token_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getExpirationTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gitUsername_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, gitUsername_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(token_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, token_); } if 
(((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExpirationTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gitUsername_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, gitUsername_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.developerconnect.v1.FetchReadTokenResponse)) { return super.equals(obj); } com.google.cloud.developerconnect.v1.FetchReadTokenResponse other = (com.google.cloud.developerconnect.v1.FetchReadTokenResponse) obj; if (!getToken().equals(other.getToken())) return false; if (hasExpirationTime() != other.hasExpirationTime()) return false; if (hasExpirationTime()) { if (!getExpirationTime().equals(other.getExpirationTime())) return false; } if (!getGitUsername().equals(other.getGitUsername())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TOKEN_FIELD_NUMBER; hash = (53 * hash) + getToken().hashCode(); if (hasExpirationTime()) { hash = (37 * hash) + EXPIRATION_TIME_FIELD_NUMBER; hash = (53 * hash) + getExpirationTime().hashCode(); } hash = (37 * hash) + GIT_USERNAME_FIELD_NUMBER; hash = (53 * hash) + getGitUsername().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( java.nio.ByteBuffer data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.developerconnect.v1.FetchReadTokenResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for responding to get read token. 
* </pre> * * Protobuf type {@code google.cloud.developerconnect.v1.FetchReadTokenResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.developerconnect.v1.FetchReadTokenResponse) com.google.cloud.developerconnect.v1.FetchReadTokenResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadTokenResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadTokenResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.developerconnect.v1.FetchReadTokenResponse.class, com.google.cloud.developerconnect.v1.FetchReadTokenResponse.Builder.class); } // Construct using com.google.cloud.developerconnect.v1.FetchReadTokenResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getExpirationTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; token_ = ""; expirationTime_ = null; if (expirationTimeBuilder_ != null) { expirationTimeBuilder_.dispose(); expirationTimeBuilder_ = null; } gitUsername_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto 
.internal_static_google_cloud_developerconnect_v1_FetchReadTokenResponse_descriptor; } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadTokenResponse getDefaultInstanceForType() { return com.google.cloud.developerconnect.v1.FetchReadTokenResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadTokenResponse build() { com.google.cloud.developerconnect.v1.FetchReadTokenResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadTokenResponse buildPartial() { com.google.cloud.developerconnect.v1.FetchReadTokenResponse result = new com.google.cloud.developerconnect.v1.FetchReadTokenResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.developerconnect.v1.FetchReadTokenResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.token_ = token_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.expirationTime_ = expirationTimeBuilder_ == null ? 
expirationTime_ : expirationTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.gitUsername_ = gitUsername_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.developerconnect.v1.FetchReadTokenResponse) { return mergeFrom((com.google.cloud.developerconnect.v1.FetchReadTokenResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.developerconnect.v1.FetchReadTokenResponse other) { if (other == com.google.cloud.developerconnect.v1.FetchReadTokenResponse.getDefaultInstance()) return this; if (!other.getToken().isEmpty()) { token_ = other.token_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasExpirationTime()) { mergeExpirationTime(other.getExpirationTime()); } if (!other.getGitUsername().isEmpty()) { gitUsername_ = other.gitUsername_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public 
final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { token_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getExpirationTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { gitUsername_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object token_ = ""; /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The token. */ public java.lang.String getToken() { java.lang.Object ref = token_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); token_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The bytes for token. */ public com.google.protobuf.ByteString getTokenBytes() { java.lang.Object ref = token_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); token_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token content. 
* </pre> * * <code>string token = 1;</code> * * @param value The token to set. * @return This builder for chaining. */ public Builder setToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } token_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return This builder for chaining. */ public Builder clearToken() { token_ = getDefaultInstance().getToken(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @param value The bytes for token to set. * @return This builder for chaining. */ public Builder setTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); token_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.Timestamp expirationTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> expirationTimeBuilder_; /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return Whether the expirationTime field is set. */ public boolean hasExpirationTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return The expirationTime. */ public com.google.protobuf.Timestamp getExpirationTime() { if (expirationTimeBuilder_ == null) { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } else { return expirationTimeBuilder_.getMessage(); } } /** * * * <pre> * Expiration timestamp. 
Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder setExpirationTime(com.google.protobuf.Timestamp value) { if (expirationTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } expirationTime_ = value; } else { expirationTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder setExpirationTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (expirationTimeBuilder_ == null) { expirationTime_ = builderForValue.build(); } else { expirationTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder mergeExpirationTime(com.google.protobuf.Timestamp value) { if (expirationTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && expirationTime_ != null && expirationTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getExpirationTimeBuilder().mergeFrom(value); } else { expirationTime_ = value; } } else { expirationTimeBuilder_.mergeFrom(value); } if (expirationTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder clearExpirationTime() { bitField0_ = (bitField0_ & ~0x00000002); expirationTime_ = null; if (expirationTimeBuilder_ != null) { expirationTimeBuilder_.dispose(); expirationTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. 
* </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getExpirationTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExpirationTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { if (expirationTimeBuilder_ != null) { return expirationTimeBuilder_.getMessageOrBuilder(); } else { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getExpirationTimeFieldBuilder() { if (expirationTimeBuilder_ == null) { expirationTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getExpirationTime(), getParentForChildren(), isClean()); expirationTime_ = null; } return expirationTimeBuilder_; } private java.lang.Object gitUsername_ = ""; /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The gitUsername. 
*/ public java.lang.String getGitUsername() { java.lang.Object ref = gitUsername_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gitUsername_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The bytes for gitUsername. */ public com.google.protobuf.ByteString getGitUsernameBytes() { java.lang.Object ref = gitUsername_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gitUsername_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @param value The gitUsername to set. * @return This builder for chaining. */ public Builder setGitUsername(java.lang.String value) { if (value == null) { throw new NullPointerException(); } gitUsername_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return This builder for chaining. */ public Builder clearGitUsername() { gitUsername_ = getDefaultInstance().getGitUsername(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The git_username to specify when making a git clone with the * token. 
For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @param value The bytes for gitUsername to set. * @return This builder for chaining. */ public Builder setGitUsernameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); gitUsername_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.developerconnect.v1.FetchReadTokenResponse) } // @@protoc_insertion_point(class_scope:google.cloud.developerconnect.v1.FetchReadTokenResponse) private static final com.google.cloud.developerconnect.v1.FetchReadTokenResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.developerconnect.v1.FetchReadTokenResponse(); } public static com.google.cloud.developerconnect.v1.FetchReadTokenResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FetchReadTokenResponse> PARSER = new com.google.protobuf.AbstractParser<FetchReadTokenResponse>() { @java.lang.Override public FetchReadTokenResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FetchReadTokenResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FetchReadTokenResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadTokenResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/maven-surefire
35,386
surefire-providers/surefire-junit-platform/src/test/java/org/apache/maven/surefire/junitplatform/RunListenerAdapterTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.maven.surefire.junitplatform; import java.lang.reflect.Method; import java.util.Collections; import java.util.Map; import java.util.Optional; import org.apache.maven.surefire.api.report.ReportEntry; import org.apache.maven.surefire.api.report.SimpleReportEntry; import org.apache.maven.surefire.api.report.StackTraceWriter; import org.apache.maven.surefire.api.report.Stoppable; import org.apache.maven.surefire.api.report.TestOutputReportEntry; import org.apache.maven.surefire.api.report.TestReportListener; import org.apache.maven.surefire.api.report.TestSetReportEntry; import org.apache.maven.surefire.report.PojoStackTraceWriter; import org.junit.Before; import org.junit.Test; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.engine.config.DefaultJupiterConfiguration; import org.junit.jupiter.engine.config.JupiterConfiguration; import org.junit.jupiter.engine.descriptor.ClassTestDescriptor; import org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor; import org.junit.jupiter.engine.descriptor.TestTemplateTestDescriptor; import org.junit.platform.engine.ConfigurationParameters; import 
org.junit.platform.engine.TestDescriptor; import org.junit.platform.engine.TestSource; import org.junit.platform.engine.UniqueId; import org.junit.platform.engine.reporting.OutputDirectoryProvider; import org.junit.platform.engine.support.descriptor.AbstractTestDescriptor; import org.junit.platform.engine.support.descriptor.ClassSource; import org.junit.platform.engine.support.descriptor.EngineDescriptor; import org.junit.platform.engine.support.descriptor.MethodSource; import org.junit.platform.launcher.TestIdentifier; import org.junit.platform.launcher.TestPlan; import org.mockito.ArgumentCaptor; import org.mockito.InOrder; import org.opentest4j.TestSkippedException; import static java.util.Collections.emptyList; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; import static org.apache.maven.surefire.api.report.RunMode.NORMAL_RUN; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.platform.engine.TestDescriptor.Type.CONTAINER; import static org.junit.platform.engine.TestDescriptor.Type.TEST; import static org.junit.platform.engine.TestExecutionResult.aborted; import static org.junit.platform.engine.TestExecutionResult.failed; import static org.junit.platform.engine.TestExecutionResult.successful; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; import static org.powermock.reflect.Whitebox.getInternalState; /** * Unit tests for {@link RunListenerAdapter}. 
* * @since 2.22.0 */ @SuppressWarnings("checkstyle:magicnumber") public class RunListenerAdapterTest { private static final ConfigurationParameters CONFIG_PARAMS = mock(ConfigurationParameters.class); private static final OutputDirectoryProvider OUTPUT_DIRECTORY = mock(OutputDirectoryProvider.class); private TestReportListener<TestOutputReportEntry> listener; private RunListenerAdapter adapter; @Before public void setUp() { listener = mock(TestReportListener.class); adapter = new RunListenerAdapter(listener, Stoppable.NOOP); adapter.testPlanExecutionStarted(TestPlan.from(emptyList(), CONFIG_PARAMS, OUTPUT_DIRECTORY)); adapter.setRunMode(NORMAL_RUN); } @Test public void notifiedWithCorrectNamesWhenMethodExecutionStarted() throws Exception { ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class); TestPlan testPlan = TestPlan.from( singletonList(new EngineDescriptor(newId(), "Luke's Plan")), CONFIG_PARAMS, OUTPUT_DIRECTORY); adapter.testPlanExecutionStarted(testPlan); TestIdentifier methodIdentifier = identifiersAsParentOnTestPlan(testPlan, newClassDescriptor(), newMethodDescriptor()); adapter.executionStarted(methodIdentifier); verify(listener).testStarting(entryCaptor.capture()); ReportEntry entry = entryCaptor.getValue(); assertEquals(MY_TEST_METHOD_NAME, entry.getName()); assertEquals(MyTestClass.class.getName(), entry.getSourceName()); assertNull(entry.getStackTraceWriter()); } @Test public void notifiedWithCompatibleNameForMethodWithArguments() throws Exception { ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class); TestPlan testPlan = TestPlan.from( singletonList(new EngineDescriptor(newId(), "Luke's Plan")), CONFIG_PARAMS, OUTPUT_DIRECTORY); adapter.testPlanExecutionStarted(testPlan); TestIdentifier methodIdentifier = identifiersAsParentOnTestPlan(testPlan, newClassDescriptor(), newMethodDescriptor(String.class)); adapter.executionStarted(methodIdentifier); 
verify(listener).testStarting(entryCaptor.capture()); ReportEntry entry = entryCaptor.getValue(); assertEquals(MY_TEST_METHOD_NAME + "(String)", entry.getName()); assertNull(entry.getNameText()); assertEquals(MyTestClass.class.getName(), entry.getSourceName()); assertNull(entry.getSourceText()); assertNull(entry.getStackTraceWriter()); } @Test public void notifiedEagerlyForTestSetWhenClassExecutionStarted() throws Exception { EngineDescriptor engine = newEngineDescriptor(); TestDescriptor parent = newClassDescriptor(); engine.addChild(parent); TestDescriptor child = newMethodDescriptor(); parent.addChild(child); TestPlan plan = TestPlan.from(singletonList(engine), CONFIG_PARAMS, OUTPUT_DIRECTORY); String className = MyTestClass.class.getName(); adapter.testPlanExecutionStarted(plan); adapter.executionStarted(TestIdentifier.from(engine)); adapter.executionStarted(TestIdentifier.from(parent)); verify(listener) .testSetStarting(new SimpleReportEntry(NORMAL_RUN, 0x0000000100000000L, className, null, null, null)); verifyNoMoreInteractions(listener); adapter.executionStarted(TestIdentifier.from(child)); verify(listener) .testStarting(new SimpleReportEntry( NORMAL_RUN, 0x0000000100000001L, className, null, MY_TEST_METHOD_NAME, null)); verifyNoMoreInteractions(listener); adapter.executionFinished(TestIdentifier.from(child), successful()); ArgumentCaptor<SimpleReportEntry> report = ArgumentCaptor.forClass(SimpleReportEntry.class); verify(listener).testSucceeded(report.capture()); assertThat(report.getValue().getRunMode()).isEqualTo(NORMAL_RUN); assertThat(report.getValue().getTestRunId()).isEqualTo(0x0000000100000001L); assertThat(report.getValue().getSourceName()).isEqualTo(className); assertThat(report.getValue().getSourceText()).isNull(); assertThat(report.getValue().getName()).isEqualTo(MY_TEST_METHOD_NAME); assertThat(report.getValue().getNameText()).isNull(); assertThat(report.getValue().getElapsed()).isNotNull(); 
assertThat(report.getValue().getSystemProperties()).isEmpty(); verifyNoMoreInteractions(listener); adapter.executionFinished(TestIdentifier.from(parent), successful()); report = ArgumentCaptor.forClass(SimpleReportEntry.class); verify(listener).testSetCompleted(report.capture()); assertThat(report.getValue().getSourceName()).isEqualTo(className); assertThat(report.getValue().getName()).isNull(); assertThat(report.getValue().getElapsed()).isNotNull(); assertThat(report.getValue().getSystemProperties()).isNotEmpty(); verifyNoMoreInteractions(listener); adapter.executionFinished(TestIdentifier.from(engine), successful()); verifyNoMoreInteractions(listener); } @Test public void displayNamesInClassAndMethods() throws Exception { EngineDescriptor engine = newEngineDescriptor(); TestDescriptor parent = newClassDescriptor("parent"); engine.addChild(parent); UniqueId id1 = parent.getUniqueId().append(MyTestClass.class.getName(), MY_NAMED_TEST_METHOD_NAME); Method m1 = MyTestClass.class.getDeclaredMethod(MY_NAMED_TEST_METHOD_NAME); TestDescriptor child1 = new TestMethodTestDescriptorWithDisplayName(id1, MyTestClass.class, m1, "dn1"); parent.addChild(child1); UniqueId id2 = parent.getUniqueId().append(MyTestClass.class.getName(), MY_TEST_METHOD_NAME); Method m2 = MyTestClass.class.getDeclaredMethod(MY_TEST_METHOD_NAME, String.class); TestDescriptor child2 = new TestMethodTestDescriptor( id2, MyTestClass.class, m2, Collections::emptyList, new DefaultJupiterConfiguration(CONFIG_PARAMS, OUTPUT_DIRECTORY)); parent.addChild(child2); TestPlan plan = TestPlan.from(singletonList(engine), CONFIG_PARAMS, OUTPUT_DIRECTORY); InOrder inOrder = inOrder(listener); adapter.testPlanExecutionStarted(plan); adapter.executionStarted(TestIdentifier.from(engine)); adapter.executionStarted(TestIdentifier.from(parent)); ArgumentCaptor<SimpleReportEntry> report = ArgumentCaptor.forClass(SimpleReportEntry.class); inOrder.verify(listener).testSetStarting(report.capture()); 
assertThat(report.getValue().getTestRunId()).isEqualTo(0x0000000100000000L); assertThat(report.getValue().getSourceName()).isEqualTo(MyTestClass.class.getName()); assertThat(report.getValue().getSourceText()).isEqualTo("parent"); assertThat(report.getValue().getName()).isNull(); assertThat(report.getValue().getSystemProperties()).isEmpty(); verifyZeroInteractions(listener); adapter.executionStarted(TestIdentifier.from(child1)); inOrder.verify(listener) .testStarting(new SimpleReportEntry( NORMAL_RUN, 0x0000000100000001L, MyTestClass.class.getName(), "parent", MY_NAMED_TEST_METHOD_NAME, "dn1")); inOrder.verifyNoMoreInteractions(); adapter.executionFinished(TestIdentifier.from(child1), successful()); report = ArgumentCaptor.forClass(SimpleReportEntry.class); inOrder.verify(listener).testSucceeded(report.capture()); assertThat(report.getValue().getRunMode()).isEqualTo(NORMAL_RUN); assertThat(report.getValue().getTestRunId()).isEqualTo(0x0000000100000001L); assertThat(report.getValue().getSourceName()).isEqualTo(MyTestClass.class.getName()); assertThat(report.getValue().getSourceText()).isEqualTo("parent"); assertThat(report.getValue().getName()).isEqualTo(MY_NAMED_TEST_METHOD_NAME); assertThat(report.getValue().getNameText()).isEqualTo("dn1"); assertThat(report.getValue().getElapsed()).isNotNull(); assertThat(report.getValue().getSystemProperties()).isEmpty(); inOrder.verifyNoMoreInteractions(); adapter.executionStarted(TestIdentifier.from(child2)); inOrder.verify(listener) .testStarting(new SimpleReportEntry( NORMAL_RUN, 0x0000000100000002L, MyTestClass.class.getName(), "parent", MY_TEST_METHOD_NAME + "(String)", null)); inOrder.verifyNoMoreInteractions(); Exception assumptionFailure = new Exception(); adapter.executionFinished(TestIdentifier.from(child2), aborted(assumptionFailure)); report = ArgumentCaptor.forClass(SimpleReportEntry.class); inOrder.verify(listener).testAssumptionFailure(report.capture()); 
assertThat(report.getValue().getRunMode()).isEqualTo(NORMAL_RUN); assertThat(report.getValue().getTestRunId()).isEqualTo(0x0000000100000002L); assertThat(report.getValue().getSourceName()).isEqualTo(MyTestClass.class.getName()); assertThat(report.getValue().getSourceText()).isEqualTo("parent"); assertThat(report.getValue().getName()).isEqualTo(MY_TEST_METHOD_NAME + "(String)"); assertThat(report.getValue().getNameText()).isNull(); assertThat(report.getValue().getElapsed()).isNotNull(); assertThat(report.getValue().getSystemProperties()).isEmpty(); assertThat(report.getValue().getStackTraceWriter()).isNotNull(); assertThat(report.getValue().getStackTraceWriter().getThrowable().getTarget()) .isSameAs(assumptionFailure); inOrder.verifyNoMoreInteractions(); adapter.executionFinished(TestIdentifier.from(parent), successful()); inOrder.verify(listener).testSetCompleted(report.capture()); assertThat(report.getValue().getSourceName()).isEqualTo(MyTestClass.class.getName()); assertThat(report.getValue().getSourceText()).isEqualTo("parent"); assertThat(report.getValue().getName()).isNull(); assertThat(report.getValue().getNameText()).isNull(); assertThat(report.getValue().getElapsed()).isNotNull(); assertThat(report.getValue().getSystemProperties()).isNotEmpty(); assertThat(report.getValue().getStackTraceWriter()).isNull(); inOrder.verifyNoMoreInteractions(); adapter.executionFinished(TestIdentifier.from(engine), successful()); inOrder.verifyNoMoreInteractions(); } @Test public void notifiedForUnclassifiedTestIdentifier() { EngineDescriptor engine = new EngineDescriptor(UniqueId.forEngine("engine"), "engine") { @Override public Type getType() { return TEST; } }; TestPlan plan = TestPlan.from(singletonList(engine), CONFIG_PARAMS, OUTPUT_DIRECTORY); adapter.testPlanExecutionStarted(plan); assertThat((TestPlan) getInternalState(adapter, "testPlan")).isSameAs(plan); assertThat((Map) getInternalState(adapter, "testStartTime")).isEmpty(); 
adapter.executionStarted(TestIdentifier.from(engine)); verify(listener) .testStarting(new SimpleReportEntry(NORMAL_RUN, 0x0000000100000001L, "engine", null, "engine", null)); verifyNoMoreInteractions(listener); adapter.executionFinished(TestIdentifier.from(engine), successful()); ArgumentCaptor<SimpleReportEntry> report = ArgumentCaptor.forClass(SimpleReportEntry.class); verify(listener).testSucceeded(report.capture()); assertThat(report.getValue().getRunMode()).isEqualTo(NORMAL_RUN); assertThat(report.getValue().getTestRunId()).isEqualTo(0x0000000100000001L); assertThat(report.getValue().getSourceName()).isEqualTo("engine"); assertThat(report.getValue().getSourceText()).isNull(); assertThat(report.getValue().getName()).isEqualTo("engine"); assertThat(report.getValue().getNameText()).isNull(); assertThat(report.getValue().getElapsed()).isNotNull(); assertThat(report.getValue().getStackTraceWriter()).isNull(); assertThat(report.getValue().getSystemProperties()).isEmpty(); adapter.testPlanExecutionFinished(plan); assertThat((TestPlan) getInternalState(adapter, "testPlan")).isNull(); assertThat((Map) getInternalState(adapter, "testStartTime")).isEmpty(); verifyNoMoreInteractions(listener); } @Test public void notNotifiedWhenEngineExecutionStarted() { adapter.executionStarted(newEngineIdentifier()); verify(listener, never()).testStarting(any()); } @Test public void notifiedWhenMethodExecutionSkipped() throws Exception { adapter.executionSkipped(newMethodIdentifier(), "test"); verify(listener).testSkipped(any()); } @Test public void notifiedWithCorrectNamesWhenClassExecutionSkipped() throws Exception { EngineDescriptor engineDescriptor = new EngineDescriptor(newId(), "Luke's Plan"); TestDescriptor classTestDescriptor = newClassDescriptor(); TestDescriptor method1 = newMethodDescriptor(); classTestDescriptor.addChild(method1); TestDescriptor method2 = newMethodDescriptor(); classTestDescriptor.addChild(method2); engineDescriptor.addChild(classTestDescriptor); TestPlan 
testPlan = TestPlan.from(singletonList(engineDescriptor), CONFIG_PARAMS, OUTPUT_DIRECTORY); adapter.testPlanExecutionStarted(testPlan); TestIdentifier classIdentifier = identifiersAsParentOnTestPlan(testPlan, newEngineDescriptor(), newClassDescriptor()); ArgumentCaptor<TestSetReportEntry> entryCaptor1 = ArgumentCaptor.forClass(TestSetReportEntry.class); ArgumentCaptor<ReportEntry> entryCaptor2 = ArgumentCaptor.forClass(ReportEntry.class); ArgumentCaptor<ReportEntry> entryCaptor3 = ArgumentCaptor.forClass(ReportEntry.class); ArgumentCaptor<TestSetReportEntry> entryCaptor4 = ArgumentCaptor.forClass(TestSetReportEntry.class); adapter.executionSkipped(classIdentifier, "test"); verify(listener).testSetStarting(entryCaptor1.capture()); verify(listener).testSkipped(entryCaptor2.capture()); verify(listener).testSkipped(entryCaptor3.capture()); verify(listener).testSetCompleted(entryCaptor4.capture()); ReportEntry entry1 = entryCaptor1.getValue(); assertNull(entry1.getName()); assertEquals(MyTestClass.class.getTypeName(), entry1.getSourceName()); ReportEntry entry4 = entryCaptor1.getValue(); assertNull(entry4.getName()); assertEquals(MyTestClass.class.getTypeName(), entry4.getSourceName()); } @Test public void notifiedWhenMethodExecutionAborted() throws Exception { adapter.executionFinished(newMethodIdentifier(), aborted(null)); verify(listener).testAssumptionFailure(any()); } @Test public void notifiedWhenClassExecutionAborted() { TestSkippedException t = new TestSkippedException("skipped"); adapter.executionFinished(newClassIdentifier(), aborted(t)); String source = MyTestClass.class.getName(); StackTraceWriter stw = new PojoStackTraceWriter(source, null, t); ArgumentCaptor<SimpleReportEntry> report = ArgumentCaptor.forClass(SimpleReportEntry.class); verify(listener).testSetCompleted(report.capture()); assertThat(report.getValue().getSourceName()).isEqualTo(source); assertThat(report.getValue().getStackTraceWriter()).isEqualTo(stw); } @Test public void 
            notifiedOfContainerFailureWhenErrored() throws Exception {
        // A container (test template) failing with a non-assertion throwable is an "error".
        adapter.executionFinished(newContainerIdentifier(), failed(new RuntimeException()));
        verify(listener).testError(any());
    }

    /** A container failing with an AssertionError is classified as a "failure", not an error. */
    @Test
    public void notifiedOfContainerFailureWhenFailed() throws Exception {
        adapter.executionFinished(newContainerIdentifier(), failed(new AssertionError()));
        verify(listener).testFailed(any());
    }

    /** A method failing with an AssertionError is reported via testFailed. */
    @Test
    public void notifiedWhenMethodExecutionFailed() throws Exception {
        adapter.executionFinished(newMethodIdentifier(), failed(new AssertionError()));
        verify(listener).testFailed(any());
    }

    /** A method failing with any other throwable is reported via testError. */
    @Test
    public void notifiedWhenMethodExecutionFailedWithError() throws Exception {
        adapter.executionFinished(newMethodIdentifier(), failed(new RuntimeException()));
        verify(listener).testError(any());
    }

    /**
     * A class-level AssertionError failure reports the class type name as source, no test
     * name, and a stack trace writer whose target is the AssertionError.
     */
    @Test
    public void notifiedWithCorrectNamesWhenClassExecutionFailed() {
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        TestPlan testPlan = TestPlan.from(
                singletonList(new EngineDescriptor(newId(), "Luke's Plan")),
                CONFIG_PARAMS,
                OUTPUT_DIRECTORY);
        adapter.testPlanExecutionStarted(testPlan);
        adapter.executionFinished(
                identifiersAsParentOnTestPlan(testPlan, newClassDescriptor()),
                failed(new AssertionError()));
        verify(listener).testFailed(entryCaptor.capture());
        ReportEntry entry = entryCaptor.getValue();
        assertEquals(MyTestClass.class.getTypeName(), entry.getSourceName());
        assertNull(entry.getName());
        assertNotNull(entry.getStackTraceWriter());
        assertNotNull(entry.getStackTraceWriter().getThrowable());
        assertThat(entry.getStackTraceWriter().getThrowable().getTarget())
                .isInstanceOf(AssertionError.class);
    }

    /** Same as above but with a RuntimeException, which must go through testError instead. */
    @Test
    public void notifiedWithCorrectNamesWhenClassExecutionErrored() {
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        TestPlan testPlan = TestPlan.from(
                singletonList(new EngineDescriptor(newId(), "Luke's Plan")),
                CONFIG_PARAMS,
                OUTPUT_DIRECTORY);
        adapter.testPlanExecutionStarted(testPlan);
        adapter.executionFinished(
                identifiersAsParentOnTestPlan(testPlan, newClassDescriptor()),
                failed(new RuntimeException()));
        verify(listener).testError(entryCaptor.capture());
        ReportEntry entry = entryCaptor.getValue();
        assertEquals(MyTestClass.class.getTypeName(), entry.getSourceName());
        assertNull(entry.getName());
        assertNotNull(entry.getStackTraceWriter());
        assertNotNull(entry.getStackTraceWriter().getThrowable());
        assertThat(entry.getStackTraceWriter().getThrowable().getTarget())
                .isInstanceOf(RuntimeException.class);
    }

    /**
     * A failed container report carries both the class source AND the method name
     * (unlike class-level failures, where the name is null).
     */
    @Test
    public void notifiedWithCorrectNamesWhenContainerFailed() throws Exception {
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        TestPlan testPlan = TestPlan.from(
                singletonList(new EngineDescriptor(newId(), "Luke's Plan")),
                CONFIG_PARAMS,
                OUTPUT_DIRECTORY);
        adapter.testPlanExecutionStarted(testPlan);
        adapter.executionFinished(newContainerIdentifier(), failed(new RuntimeException()));
        verify(listener).testError(entryCaptor.capture());
        ReportEntry entry = entryCaptor.getValue();
        assertEquals(MyTestClass.class.getTypeName(), entry.getSourceName());
        assertEquals(MY_TEST_METHOD_NAME, entry.getName());
        assertNotNull(entry.getStackTraceWriter());
        assertNotNull(entry.getStackTraceWriter().getThrowable());
        assertThat(entry.getStackTraceWriter().getThrowable().getTarget())
                .isInstanceOf(RuntimeException.class);
    }

    /** A successful method execution is forwarded as testSucceeded. */
    @Test
    public void notifiedWhenMethodExecutionSucceeded() throws Exception {
        adapter.executionFinished(newMethodIdentifier(), successful());
        verify(listener).testSucceeded(any());
    }

    /**
     * A successfully completed class produces a testSetStarting/testSetCompleted pair around
     * it -- and no testSucceeded for the class itself (checked where this method continues).
     */
    @Test
    public void notifiedForTestSetWhenClassExecutionSucceeded() {
        EngineDescriptor engineDescriptor = newEngineDescriptor();
        TestDescriptor classDescriptor = newClassDescriptor();
        engineDescriptor.addChild(classDescriptor);
        adapter.testPlanExecutionStarted(
                TestPlan.from(singleton(engineDescriptor), CONFIG_PARAMS, OUTPUT_DIRECTORY));
        adapter.executionStarted(TestIdentifier.from(classDescriptor));
        // (continuation) Finish the class and verify the set-level reports.
        adapter.executionFinished(TestIdentifier.from(classDescriptor), successful());
        String className = MyTestClass.class.getName();
        // Set start: run id slot 0, class as source, no names yet.
        verify(listener)
                .testSetStarting(new SimpleReportEntry(
                        NORMAL_RUN, 0x0000000100000000L, className, null, null, null));
        // Set completion: elapsed time and system properties present, no name / stack trace.
        ArgumentCaptor<SimpleReportEntry> report = ArgumentCaptor.forClass(SimpleReportEntry.class);
        verify(listener).testSetCompleted(report.capture());
        assertThat(report.getValue().getRunMode()).isEqualTo(NORMAL_RUN);
        assertThat(report.getValue().getTestRunId()).isEqualTo(0x0000000100000000L);
        assertThat(report.getValue().getSourceName()).isEqualTo(className);
        assertThat(report.getValue().getSourceText()).isNull();
        assertThat(report.getValue().getName()).isNull();
        assertThat(report.getValue().getNameText()).isNull();
        assertThat(report.getValue().getStackTraceWriter()).isNull();
        assertThat(report.getValue().getElapsed()).isNotNull();
        assertThat(report.getValue().getSystemProperties()).isNotEmpty();
        // The class container itself must not be reported as a succeeded test.
        verify(listener, never()).testSucceeded(any());
        verifyNoMoreInteractions(listener);
    }

    /**
     * When a test has no source of its own, the parent's display name is used as the
     * report's source name.
     */
    @Test
    public void notifiedWithParentDisplayNameWhenTestClassUnknown() {
        // Set up a test plan
        TestPlan plan = TestPlan.from(
                singletonList(new EngineDescriptor(newId(), "Luke's Plan")),
                CONFIG_PARAMS,
                OUTPUT_DIRECTORY);
        adapter.testPlanExecutionStarted(plan);
        // Use the test plan to set up child with parent.
        final String parentDisplay = "I am your father";
        TestIdentifier child = newSourcelessChildIdentifierWithParent(plan, parentDisplay, null);
        adapter.executionStarted(child);
        // Check that the adapter has informed Surefire that the test has been invoked,
        // with the parent name as source (since the test case itself had no source).
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        verify(listener).testStarting(entryCaptor.capture());
        assertEquals(parentDisplay, entryCaptor.getValue().getSourceName());
        assertNull(entryCaptor.getValue().getSourceText());
        assertNull(entryCaptor.getValue().getName());
        assertNull(entryCaptor.getValue().getNameText());
    }

    /**
     * A sourceless child whose parent has a class source still gets a stack trace writer
     * on failure.
     */
    @Test
    public void stackTraceWriterPresentWhenParentHasSource() {
        TestPlan plan = TestPlan.from(
                singletonList(new EngineDescriptor(newId(), "Some Plan")),
                CONFIG_PARAMS,
                OUTPUT_DIRECTORY);
        adapter.testPlanExecutionStarted(plan);
        TestIdentifier child =
                newSourcelessChildIdentifierWithParent(plan, "Parent", ClassSource.from(MyTestClass.class));
        adapter.executionFinished(child, failed(new RuntimeException()));
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        verify(listener).testError(entryCaptor.capture());
        assertNotNull(entryCaptor.getValue().getStackTraceWriter());
    }

    /**
     * Even with neither child nor parent source, the failure report carries a usable stack
     * trace writer (all three trace renderings must be produced).
     */
    @Test
    public void stackTraceWriterDefaultsToTestClass() {
        TestPlan plan = TestPlan.from(
                singletonList(new EngineDescriptor(newId(), "Some Plan")),
                CONFIG_PARAMS,
                OUTPUT_DIRECTORY);
        adapter.testPlanExecutionStarted(plan);
        TestIdentifier child = newSourcelessChildIdentifierWithParent(plan, "Parent", null);
        adapter.executionFinished(child, failed(new RuntimeException("message")));
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        verify(listener).testError(entryCaptor.capture());
        assertNotNull(entryCaptor.getValue().getStackTraceWriter());
        assertNotNull(entryCaptor.getValue().getStackTraceWriter().smartTrimmedStackTrace());
        assertNotNull(entryCaptor.getValue().getStackTraceWriter().writeTraceToString());
        assertNotNull(entryCaptor.getValue().getStackTraceWriter().writeTrimmedTraceToString());
    }

    /** failed(null) -- a failure without a throwable -- must still yield a stack trace writer. */
    @Test
    public void stackTraceWriterPresentEvenWithoutException() throws Exception {
        adapter.executionFinished(newMethodIdentifier(), failed(null));
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        verify(listener).testError(entryCaptor.capture());
        assertNotNull(entryCaptor.getValue().getStackTraceWriter());
    }

    /**
     * The report keeps the real method name in getName() and the custom display name only
     * in getNameText() -- display names never replace the source/method identifiers.
     */
    @Test
    public void displayNamesIgnoredInReport() throws NoSuchMethodException {
        TestMethodTestDescriptorWithDisplayName descriptor =
                new TestMethodTestDescriptorWithDisplayName(
                        newId(),
                        MyTestClass.class,
                        MyTestClass.class.getDeclaredMethod("myNamedTestMethod"),
                        "some display name");
        TestIdentifier factoryIdentifier = TestIdentifier.from(descriptor);
        ArgumentCaptor<ReportEntry> entryCaptor = ArgumentCaptor.forClass(ReportEntry.class);
        adapter.executionSkipped(factoryIdentifier, "");
        verify(listener).testSkipped(entryCaptor.capture());
        ReportEntry value = entryCaptor.getValue();
        assertEquals(MyTestClass.class.getName(), value.getSourceName());
        assertNull(value.getSourceText());
        assertEquals("myNamedTestMethod", value.getName());
        assertEquals("some display name", value.getNameText());
    }

    // Builds a TEST-typed identifier for MyTestClass.myTestMethod (no-arg overload).
    private static TestIdentifier newMethodIdentifier() throws Exception {
        return TestIdentifier.from(newMethodDescriptor());
    }

    // Builds a Jupiter method descriptor for MyTestClass.myTestMethod with the given
    // parameter types (empty varargs selects the no-arg overload).
    private static TestDescriptor newMethodDescriptor(Class<?>...
            parameterTypes) throws Exception {
        return new TestMethodTestDescriptor(
                UniqueId.forEngine("method"),
                MyTestClass.class,
                MyTestClass.class.getDeclaredMethod(MY_TEST_METHOD_NAME, parameterTypes),
                Collections::emptyList,
                new DefaultJupiterConfiguration(CONFIG_PARAMS, OUTPUT_DIRECTORY));
    }

    // Identifier for a plain class descriptor of MyTestClass.
    private static TestIdentifier newClassIdentifier() {
        return TestIdentifier.from(newClassDescriptor());
    }

    // Class descriptor whose display name is forced to {@code displayName} via a mocked
    // DisplayNameGenerator wired into a mocked JupiterConfiguration.
    private static TestDescriptor newClassDescriptor(String displayName) {
        JupiterConfiguration jupiterConfiguration = mock(JupiterConfiguration.class);
        DisplayNameGenerator displayNameGenerator = mock(DisplayNameGenerator.class);
        when(displayNameGenerator.generateDisplayNameForClass(MyTestClass.class))
                .thenReturn(displayName);
        when(jupiterConfiguration.getDefaultDisplayNameGenerator()).thenReturn(displayNameGenerator);
        return new ClassTestDescriptor(
                UniqueId.root("class", MyTestClass.class.getName()),
                MyTestClass.class,
                jupiterConfiguration) {};
    }

    // Class descriptor for MyTestClass using the real default Jupiter configuration.
    private static TestDescriptor newClassDescriptor() {
        return new ClassTestDescriptor(
                UniqueId.root("class", MyTestClass.class.getName()),
                MyTestClass.class,
                new DefaultJupiterConfiguration(CONFIG_PARAMS, OUTPUT_DIRECTORY));
    }

    // Registers on the plan a mocked CONTAINER parent (with the given display name and
    // optional source) plus a mocked TEST child that deliberately has NO source of its own.
    // Returns the child's identifier -- the special case exercised by the sourceless tests.
    private static TestIdentifier newSourcelessChildIdentifierWithParent(
            TestPlan testPlan, String parentDisplay, TestSource parentTestSource) {
        // A parent test identifier with a name.
        TestDescriptor parent = mock(TestDescriptor.class);
        when(parent.getUniqueId()).thenReturn(newId());
        when(parent.getDisplayName()).thenReturn(parentDisplay);
        when(parent.getLegacyReportingName()).thenReturn(parentDisplay);
        when(parent.getSource()).thenReturn(Optional.ofNullable(parentTestSource));
        when(parent.getType()).thenReturn(CONTAINER);
        TestIdentifier parentId = TestIdentifier.from(parent);
        // The (child) test case that is to be executed as part of a test plan.
        TestDescriptor child = mock(TestDescriptor.class);
        when(child.getUniqueId()).thenReturn(newId());
        when(child.getType()).thenReturn(TEST);
        when(child.getLegacyReportingName()).thenReturn("child");
        // Ensure the child source is null yet that there is a parent -- the special case to be tested.
        when(child.getSource()).thenReturn(Optional.empty());
        when(child.getParent()).thenReturn(Optional.of(parent));
        TestIdentifier childId = TestIdentifier.from(child);
        testPlan.addInternal(childId);
        testPlan.addInternal(parentId);
        return childId;
    }

    // CONTAINER-typed identifier: a test template descriptor for myTestMethod.
    private static TestIdentifier newContainerIdentifier() throws Exception {
        return TestIdentifier.from(new TestTemplateTestDescriptor(
                UniqueId.forEngine("method"),
                MyTestClass.class,
                MyTestClass.class.getDeclaredMethod(MY_TEST_METHOD_NAME),
                Collections::emptyList,
                new DefaultJupiterConfiguration(CONFIG_PARAMS, OUTPUT_DIRECTORY)));
    }

    // Identifier for a plain engine descriptor.
    private static TestIdentifier newEngineIdentifier() {
        TestDescriptor testDescriptor = newEngineDescriptor();
        return TestIdentifier.from(testDescriptor);
    }

    private static EngineDescriptor newEngineDescriptor() {
        return new EngineDescriptor(UniqueId.forEngine("engine"), "engine");
    }

    // Links child under parent, registers both identifiers on the plan, returns the child's.
    private static TestIdentifier identifiersAsParentOnTestPlan(
            TestPlan plan, TestDescriptor parent, TestDescriptor child) {
        child.setParent(parent);
        TestIdentifier parentIdentifier = TestIdentifier.from(parent);
        TestIdentifier childIdentifier = TestIdentifier.from(child);
        plan.addInternal(parentIdentifier);
        plan.addInternal(childIdentifier);
        return childIdentifier;
    }

    // Registers a single root descriptor on the plan and returns its identifier.
    private static TestIdentifier identifiersAsParentOnTestPlan(TestPlan plan, TestDescriptor root) {
        TestIdentifier rootIdentifier = TestIdentifier.from(root);
        plan.addInternal(rootIdentifier);
        return rootIdentifier;
    }

    private static UniqueId newId() {
        return UniqueId.forEngine("engine");
    }

    private static final String MY_TEST_METHOD_NAME = "myTestMethod";
    private static final String MY_NAMED_TEST_METHOD_NAME = "myNamedTestMethod";

    // Fixture class: two myTestMethod overloads (no-arg and (String)) plus a @DisplayName test,
    // reflected upon by the factory helpers above.
    private static class MyTestClass {
        @org.junit.jupiter.api.Test
        void myTestMethod() {}

        @org.junit.jupiter.api.Test
        void myTestMethod(String foo) {}

        @DisplayName("name")
        @org.junit.jupiter.api.Test
        void myNamedTestMethod() {}
    }

    // TEST-typed descriptor with an explicit display name and a MethodSource -- used by
    // displayNamesIgnoredInReport to check that display names do not leak into getName().
    static class TestMethodTestDescriptorWithDisplayName extends AbstractTestDescriptor {
        private TestMethodTestDescriptorWithDisplayName(
                UniqueId uniqueId, Class<?> testClass, Method testMethod, String displayName) {
            super(uniqueId, displayName, MethodSource.from(testClass, testMethod));
        }

        @Override
        public Type getType() {
            return Type.TEST;
        }
    }
}
google/java-photoslibrary
35,212
photoslibraryapi/src/main/java/com/google/photos/library/v1/proto/UpdateMediaItemRequest.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/photos/library/v1/photos_library.proto package com.google.photos.library.v1.proto; /** * * * <pre> * Request to update a media item in Google Photos. * </pre> * * Protobuf type {@code google.photos.library.v1.UpdateMediaItemRequest} */ public final class UpdateMediaItemRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.photos.library.v1.UpdateMediaItemRequest) UpdateMediaItemRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateMediaItemRequest.newBuilder() to construct. private UpdateMediaItemRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateMediaItemRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateMediaItemRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateMediaItemRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateMediaItemRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.photos.library.v1.proto.UpdateMediaItemRequest.class, com.google.photos.library.v1.proto.UpdateMediaItemRequest.Builder.class); } public static final int MEDIA_ITEM_FIELD_NUMBER = 1; private com.google.photos.types.proto.MediaItem mediaItem_; /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. 
* The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code>.google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the mediaItem field is set. */ @java.lang.Override public boolean hasMediaItem() { return mediaItem_ != null; } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code>.google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The mediaItem. */ @java.lang.Override public com.google.photos.types.proto.MediaItem getMediaItem() { return mediaItem_ == null ? com.google.photos.types.proto.MediaItem.getDefaultInstance() : mediaItem_; } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code>.google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.photos.types.proto.MediaItemOrBuilder getMediaItemOrBuilder() { return mediaItem_ == null ? com.google.photos.types.proto.MediaItem.getDefaultInstance() : mediaItem_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. 
*/ @java.lang.Override public boolean hasUpdateMask() { return updateMask_ != null; } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (mediaItem_ != null) { output.writeMessage(1, getMediaItem()); } if (updateMask_ != null) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (mediaItem_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMediaItem()); } if (updateMask_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.photos.library.v1.proto.UpdateMediaItemRequest)) { return super.equals(obj); } com.google.photos.library.v1.proto.UpdateMediaItemRequest other = (com.google.photos.library.v1.proto.UpdateMediaItemRequest) obj; if (hasMediaItem() != other.hasMediaItem()) return false; if (hasMediaItem()) { if (!getMediaItem().equals(other.getMediaItem())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMediaItem()) { hash = (37 * hash) + MEDIA_ITEM_FIELD_NUMBER; hash = (53 * hash) + getMediaItem().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.photos.library.v1.proto.UpdateMediaItemRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.photos.library.v1.proto.UpdateMediaItemRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to update a media item in Google Photos. 
* </pre> * * Protobuf type {@code google.photos.library.v1.UpdateMediaItemRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.photos.library.v1.UpdateMediaItemRequest) com.google.photos.library.v1.proto.UpdateMediaItemRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateMediaItemRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateMediaItemRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.photos.library.v1.proto.UpdateMediaItemRequest.class, com.google.photos.library.v1.proto.UpdateMediaItemRequest.Builder.class); } // Construct using com.google.photos.library.v1.proto.UpdateMediaItemRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; mediaItem_ = null; if (mediaItemBuilder_ != null) { mediaItemBuilder_.dispose(); mediaItemBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.photos.library.v1.proto.LibraryServiceProto .internal_static_google_photos_library_v1_UpdateMediaItemRequest_descriptor; } @java.lang.Override public com.google.photos.library.v1.proto.UpdateMediaItemRequest getDefaultInstanceForType() { return com.google.photos.library.v1.proto.UpdateMediaItemRequest.getDefaultInstance(); } 
@java.lang.Override public com.google.photos.library.v1.proto.UpdateMediaItemRequest build() { com.google.photos.library.v1.proto.UpdateMediaItemRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.photos.library.v1.proto.UpdateMediaItemRequest buildPartial() { com.google.photos.library.v1.proto.UpdateMediaItemRequest result = new com.google.photos.library.v1.proto.UpdateMediaItemRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.photos.library.v1.proto.UpdateMediaItemRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.mediaItem_ = mediaItemBuilder_ == null ? mediaItem_ : mediaItemBuilder_.build(); } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.photos.library.v1.proto.UpdateMediaItemRequest) { return mergeFrom((com.google.photos.library.v1.proto.UpdateMediaItemRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.photos.library.v1.proto.UpdateMediaItemRequest other) { if (other == com.google.photos.library.v1.proto.UpdateMediaItemRequest.getDefaultInstance()) return this; if (other.hasMediaItem()) { mergeMediaItem(other.getMediaItem()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getMediaItemFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.photos.types.proto.MediaItem mediaItem_; private com.google.protobuf.SingleFieldBuilderV3< com.google.photos.types.proto.MediaItem, com.google.photos.types.proto.MediaItem.Builder, com.google.photos.types.proto.MediaItemOrBuilder> mediaItemBuilder_; /** * * * <pre> * Required. 
The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the mediaItem field is set. */ public boolean hasMediaItem() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The mediaItem. */ public com.google.photos.types.proto.MediaItem getMediaItem() { if (mediaItemBuilder_ == null) { return mediaItem_ == null ? com.google.photos.types.proto.MediaItem.getDefaultInstance() : mediaItem_; } else { return mediaItemBuilder_.getMessage(); } } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMediaItem(com.google.photos.types.proto.MediaItem value) { if (mediaItemBuilder_ == null) { if (value == null) { throw new NullPointerException(); } mediaItem_ = value; } else { mediaItemBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. 
* The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMediaItem(com.google.photos.types.proto.MediaItem.Builder builderForValue) { if (mediaItemBuilder_ == null) { mediaItem_ = builderForValue.build(); } else { mediaItemBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeMediaItem(com.google.photos.types.proto.MediaItem value) { if (mediaItemBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && mediaItem_ != null && mediaItem_ != com.google.photos.types.proto.MediaItem.getDefaultInstance()) { getMediaItemBuilder().mergeFrom(value); } else { mediaItem_ = value; } } else { mediaItemBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearMediaItem() { bitField0_ = (bitField0_ & ~0x00000001); mediaItem_ = null; if (mediaItemBuilder_ != null) { mediaItemBuilder_.dispose(); mediaItemBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. 
* The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.photos.types.proto.MediaItem.Builder getMediaItemBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMediaItemFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. * </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.photos.types.proto.MediaItemOrBuilder getMediaItemOrBuilder() { if (mediaItemBuilder_ != null) { return mediaItemBuilder_.getMessageOrBuilder(); } else { return mediaItem_ == null ? com.google.photos.types.proto.MediaItem.getDefaultInstance() : mediaItem_; } } /** * * * <pre> * Required. The [MediaItem][google.photos.types.MediaItem] to update. * The media item's `id` field is used to identify the media item to be * updated. * The media item's `description` field is used to set the new media item * description. 
* </pre> * * <code> * .google.photos.types.MediaItem media_item = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.photos.types.proto.MediaItem, com.google.photos.types.proto.MediaItem.Builder, com.google.photos.types.proto.MediaItemOrBuilder> getMediaItemFieldBuilder() { if (mediaItemBuilder_ == null) { mediaItemBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.photos.types.proto.MediaItem, com.google.photos.types.proto.MediaItem.Builder, com.google.photos.types.proto.MediaItemOrBuilder>( getMediaItem(), getParentForChildren(), isClean()); mediaItem_ = null; } return mediaItemBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Indicate what fields in the provided media item to update. * The only valid value is `description`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.photos.library.v1.UpdateMediaItemRequest) } // @@protoc_insertion_point(class_scope:google.photos.library.v1.UpdateMediaItemRequest) private static final com.google.photos.library.v1.proto.UpdateMediaItemRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.photos.library.v1.proto.UpdateMediaItemRequest(); } public static com.google.photos.library.v1.proto.UpdateMediaItemRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateMediaItemRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateMediaItemRequest>() { @java.lang.Override public UpdateMediaItemRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateMediaItemRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateMediaItemRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.photos.library.v1.proto.UpdateMediaItemRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,086
java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/VariantsHistory.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3beta1/experiment.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3beta1; /** * * * <pre> * The history of variants update. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.VariantsHistory} */ public final class VariantsHistory extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.VariantsHistory) VariantsHistoryOrBuilder { private static final long serialVersionUID = 0L; // Use VariantsHistory.newBuilder() to construct. 
private VariantsHistory(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private VariantsHistory() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new VariantsHistory(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.ExperimentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_VariantsHistory_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.ExperimentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_VariantsHistory_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory.class, com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory.Builder.class); } private int bitField0_; private int variantsCase_ = 0; @SuppressWarnings("serial") private java.lang.Object variants_; public enum VariantsCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { VERSION_VARIANTS(1), VARIANTS_NOT_SET(0); private final int value; private VariantsCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static VariantsCase valueOf(int value) { return forNumber(value); } public static VariantsCase forNumber(int value) { switch (value) { case 1: return VERSION_VARIANTS; case 0: return VARIANTS_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public VariantsCase getVariantsCase() { return VariantsCase.forNumber(variantsCase_); } public static final int VERSION_VARIANTS_FIELD_NUMBER = 1; /** * * * <pre> * The flow versions as the variants. 
* </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> * * @return Whether the versionVariants field is set. */ @java.lang.Override public boolean hasVersionVariants() { return variantsCase_ == 1; } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> * * @return The versionVariants. */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VersionVariants getVersionVariants() { if (variantsCase_ == 1) { return (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_; } return com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.getDefaultInstance(); } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VersionVariantsOrBuilder getVersionVariantsOrBuilder() { if (variantsCase_ == 1) { return (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_; } return com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.getDefaultInstance(); } public static final int UPDATE_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp updateTime_; /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. */ @java.lang.Override public boolean hasUpdateTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. */ @java.lang.Override public com.google.protobuf.Timestamp getUpdateTime() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } /** * * * <pre> * Update time of the variants. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (variantsCase_ == 1) { output.writeMessage(1, (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateTime()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (variantsCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateTime()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory other = (com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory) obj; if (hasUpdateTime() != other.hasUpdateTime()) return false; if (hasUpdateTime()) { if (!getUpdateTime().equals(other.getUpdateTime())) return false; } if (!getVariantsCase().equals(other.getVariantsCase())) return false; switch (variantsCase_) { case 1: if 
(!getVersionVariants().equals(other.getVersionVariants())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateTime()) { hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getUpdateTime().hashCode(); } switch (variantsCase_) { case 1: hash = (37 * hash) + VERSION_VARIANTS_FIELD_NUMBER; hash = (53 * hash) + getVersionVariants().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder( com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The history of variants update. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.VariantsHistory} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.VariantsHistory) com.google.cloud.dialogflow.cx.v3beta1.VariantsHistoryOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.ExperimentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_VariantsHistory_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.ExperimentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_VariantsHistory_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory.class, com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { 
super.clear(); bitField0_ = 0; if (versionVariantsBuilder_ != null) { versionVariantsBuilder_.clear(); } updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } variantsCase_ = 0; variants_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3beta1.ExperimentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_VariantsHistory_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory build() { com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory buildPartial() { com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory result = new com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.updateTime_ = updateTimeBuilder_ == null ? 
updateTime_ : updateTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory result) { result.variantsCase_ = variantsCase_; result.variants_ = this.variants_; if (variantsCase_ == 1 && versionVariantsBuilder_ != null) { result.variants_ = versionVariantsBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory) { return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory other) { if (other == com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory.getDefaultInstance()) return this; if (other.hasUpdateTime()) { mergeUpdateTime(other.getUpdateTime()); } switch (other.getVariantsCase()) { case VERSION_VARIANTS: { mergeVersionVariants(other.getVersionVariants()); break; } case VARIANTS_NOT_SET: { break; } } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getVersionVariantsFieldBuilder().getBuilder(), extensionRegistry); variantsCase_ = 1; break; } // case 10 case 18: { input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int variantsCase_ = 0; private java.lang.Object variants_; public VariantsCase getVariantsCase() { return VariantsCase.forNumber(variantsCase_); } public Builder clearVariants() { variantsCase_ = 0; variants_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.VersionVariants, com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.Builder, com.google.cloud.dialogflow.cx.v3beta1.VersionVariantsOrBuilder> versionVariantsBuilder_; /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> * * @return Whether the versionVariants field is set. 
*/ @java.lang.Override public boolean hasVersionVariants() { return variantsCase_ == 1; } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> * * @return The versionVariants. */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VersionVariants getVersionVariants() { if (versionVariantsBuilder_ == null) { if (variantsCase_ == 1) { return (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_; } return com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.getDefaultInstance(); } else { if (variantsCase_ == 1) { return versionVariantsBuilder_.getMessage(); } return com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.getDefaultInstance(); } } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ public Builder setVersionVariants( com.google.cloud.dialogflow.cx.v3beta1.VersionVariants value) { if (versionVariantsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } variants_ = value; onChanged(); } else { versionVariantsBuilder_.setMessage(value); } variantsCase_ = 1; return this; } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ public Builder setVersionVariants( com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.Builder builderForValue) { if (versionVariantsBuilder_ == null) { variants_ = builderForValue.build(); onChanged(); } else { versionVariantsBuilder_.setMessage(builderForValue.build()); } variantsCase_ = 1; return this; } /** * * * <pre> * The flow versions as the variants. 
* </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ public Builder mergeVersionVariants( com.google.cloud.dialogflow.cx.v3beta1.VersionVariants value) { if (versionVariantsBuilder_ == null) { if (variantsCase_ == 1 && variants_ != com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.getDefaultInstance()) { variants_ = com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.newBuilder( (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_) .mergeFrom(value) .buildPartial(); } else { variants_ = value; } onChanged(); } else { if (variantsCase_ == 1) { versionVariantsBuilder_.mergeFrom(value); } else { versionVariantsBuilder_.setMessage(value); } } variantsCase_ = 1; return this; } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ public Builder clearVersionVariants() { if (versionVariantsBuilder_ == null) { if (variantsCase_ == 1) { variantsCase_ = 0; variants_ = null; onChanged(); } } else { if (variantsCase_ == 1) { variantsCase_ = 0; variants_ = null; } versionVariantsBuilder_.clear(); } return this; } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ public com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.Builder getVersionVariantsBuilder() { return getVersionVariantsFieldBuilder().getBuilder(); } /** * * * <pre> * The flow versions as the variants. 
* </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VersionVariantsOrBuilder getVersionVariantsOrBuilder() { if ((variantsCase_ == 1) && (versionVariantsBuilder_ != null)) { return versionVariantsBuilder_.getMessageOrBuilder(); } else { if (variantsCase_ == 1) { return (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_; } return com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.getDefaultInstance(); } } /** * * * <pre> * The flow versions as the variants. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.VersionVariants version_variants = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.VersionVariants, com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.Builder, com.google.cloud.dialogflow.cx.v3beta1.VersionVariantsOrBuilder> getVersionVariantsFieldBuilder() { if (versionVariantsBuilder_ == null) { if (!(variantsCase_ == 1)) { variants_ = com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.getDefaultInstance(); } versionVariantsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.VersionVariants, com.google.cloud.dialogflow.cx.v3beta1.VersionVariants.Builder, com.google.cloud.dialogflow.cx.v3beta1.VersionVariantsOrBuilder>( (com.google.cloud.dialogflow.cx.v3beta1.VersionVariants) variants_, getParentForChildren(), isClean()); variants_ = null; } variantsCase_ = 1; onChanged(); return versionVariantsBuilder_; } private com.google.protobuf.Timestamp updateTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> updateTimeBuilder_; /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. 
*/ public boolean hasUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. */ public com.google.protobuf.Timestamp getUpdateTime() { if (updateTimeBuilder_ == null) { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } else { return updateTimeBuilder_.getMessage(); } } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateTime_ = value; } else { updateTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (updateTimeBuilder_ == null) { updateTime_ = builderForValue.build(); } else { updateTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateTime_ != null && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getUpdateTimeBuilder().mergeFrom(value); } else { updateTime_ = value; } } else { updateTimeBuilder_.mergeFrom(value); } if (updateTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Update time of the variants. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder clearUpdateTime() { bitField0_ = (bitField0_ & ~0x00000002); updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { if (updateTimeBuilder_ != null) { return updateTimeBuilder_.getMessageOrBuilder(); } else { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } } /** * * * <pre> * Update time of the variants. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getUpdateTimeFieldBuilder() { if (updateTimeBuilder_ == null) { updateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getUpdateTime(), getParentForChildren(), isClean()); updateTime_ = null; } return updateTimeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.VariantsHistory) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.VariantsHistory) private static final com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory(); } public static com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<VariantsHistory> PARSER = new com.google.protobuf.AbstractParser<VariantsHistory>() { @java.lang.Override public VariantsHistory parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<VariantsHistory> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<VariantsHistory> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.VariantsHistory getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,245
java-resourcemanager/google-cloud-resourcemanager/src/test/java/com/google/cloud/resourcemanager/ResourceManagerImplTest.java
/* * Copyright 2015 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.resourcemanager; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.api.gax.paging.Page; import com.google.api.services.cloudresourcemanager.model.Constraint; import com.google.api.services.cloudresourcemanager.model.OrgPolicy; import com.google.cloud.Identity; import com.google.cloud.Policy; import com.google.cloud.Role; import com.google.cloud.resourcemanager.OrgPolicyInfo.BoolPolicy; import com.google.cloud.resourcemanager.ProjectInfo.ResourceId; import com.google.cloud.resourcemanager.ResourceManager.ProjectField; import com.google.cloud.resourcemanager.ResourceManager.ProjectGetOption; import com.google.cloud.resourcemanager.ResourceManager.ProjectListOption; import com.google.cloud.resourcemanager.spi.ResourceManagerRpcFactory; import com.google.cloud.resourcemanager.spi.v1beta1.ResourceManagerRpc; import com.google.cloud.resourcemanager.spi.v1beta1.ResourceManagerRpc.ListResult; import 
com.google.cloud.resourcemanager.testing.LocalResourceManagerHelper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) public class ResourceManagerImplTest { private static final LocalResourceManagerHelper RESOURCE_MANAGER_HELPER = LocalResourceManagerHelper.create(); private static final ResourceManager RESOURCE_MANAGER = RESOURCE_MANAGER_HELPER.getOptions().getService(); private static final ProjectGetOption GET_FIELDS = ProjectGetOption.fields(ProjectField.NAME, ProjectField.CREATE_TIME); private static final ProjectListOption LIST_FIELDS = ProjectListOption.fields(ProjectField.NAME, ProjectField.LABELS); private static final ProjectListOption LIST_FILTER = ProjectListOption.filter("id:* name:myProject labels.color:blue LABELS.SIZE:*"); private static final ProjectInfo PARTIAL_PROJECT = ProjectInfo.newBuilder("partial-project").build(); private static final ResourceId PARENT = new ResourceId("id", "type"); private static final ProjectInfo COMPLETE_PROJECT = ProjectInfo.newBuilder("complete-project") .setName("name") .setLabels(ImmutableMap.of("k1", "v1")) .setParent(PARENT) .build(); private static final Map<ResourceManagerRpc.Option, ?> EMPTY_RPC_OPTIONS = ImmutableMap.of(); private static final Policy POLICY = Policy.newBuilder() .addIdentity(Role.owner(), Identity.user("me@gmail.com")) .addIdentity(Role.editor(), Identity.serviceAccount("serviceaccount@gmail.com")) .build(); private static final String CURSOR = "cursor"; private static final String RESOURCE = "folders/my-folder"; private static final String CONSTRAINTS = 
"constraints/serviceuser.services"; private static final String ETAG = "abcd12"; private static final String UPDATE_TIME = "014-10-02T15:01:23.045123456Z"; private static final String NAME = "constraints/serviceuser.services"; private static final String CONSTRAINT_DEFAULT = "ALLOW"; private static final String DISPLAY_NAME = "constraints-display-name"; private static final String DESCRIPTION = "Detailed description of what this Constraint controls as well as how and where it is" + " enforced"; private static final Integer VERSION = 1; private static final BoolPolicy BOOLEAN_POLICY = new BoolPolicy(true); private static final OrgPolicyInfo.Policies LIST_POLICY = new OrgPolicyInfo.Policies( "allvaluse", Arrays.asList("allowedValues"), Arrays.asList("deniedValues"), true, "suggestedValue"); private static final OrgPolicyInfo ORG_POLICY_INFO = OrgPolicyInfo.newBuilder() .setBoolPolicy(BOOLEAN_POLICY) .setConstraint(CONSTRAINTS) .setListPolicy(LIST_POLICY) .setEtag(ETAG) .setUpdateTime(UPDATE_TIME) .setVersion(VERSION) .build(); private static final ConstraintInfo.Constraints LIST_CONSTRAINT = new ConstraintInfo.Constraints("suggested-value", true); private static final ConstraintInfo CONSTRAINT_INFO = ConstraintInfo.newBuilder(NAME) .setConstraintDefault(CONSTRAINT_DEFAULT) .setDisplayName(DISPLAY_NAME) .setDescription(DESCRIPTION) .setConstraints(LIST_CONSTRAINT) .setVersion(VERSION) .build(); private ResourceManagerRpcFactory rpcFactoryMock = Mockito.mock(ResourceManagerRpcFactory.class); private ResourceManagerRpc resourceManagerRpcMock = Mockito.mock(ResourceManagerRpc.class); @BeforeClass public static void beforeClass() { RESOURCE_MANAGER_HELPER.start(); } @Before public void setUp() { clearProjects(); } private void clearProjects() { for (Project project : RESOURCE_MANAGER.list().getValues()) { RESOURCE_MANAGER_HELPER.removeProject(project.getProjectId()); } } @AfterClass public static void afterClass() { RESOURCE_MANAGER_HELPER.stop(); } private void 
compareReadWriteFields(ProjectInfo expected, ProjectInfo actual) { assertEquals(expected.getProjectId(), actual.getProjectId()); assertEquals(expected.getName(), actual.getName()); assertEquals(expected.getLabels(), actual.getLabels()); assertEquals(expected.getParent(), actual.getParent()); } @Test public void testCreate() { Project returnedProject = RESOURCE_MANAGER.create(PARTIAL_PROJECT); compareReadWriteFields(PARTIAL_PROJECT, returnedProject); assertEquals(ProjectInfo.State.ACTIVE, returnedProject.getState()); assertNull(returnedProject.getName()); assertNull(returnedProject.getParent()); assertNotNull(returnedProject.getProjectNumber()); assertNotNull(returnedProject.getCreateTimeMillis()); assertSame(RESOURCE_MANAGER, returnedProject.getResourceManager()); try { RESOURCE_MANAGER.create(PARTIAL_PROJECT); fail("Should fail, project already exists."); } catch (ResourceManagerException e) { assertEquals(409, e.getCode()); assertTrue( e.getMessage().startsWith("A project with the same project ID") && e.getMessage().endsWith("already exists.")); } returnedProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); compareReadWriteFields(COMPLETE_PROJECT, returnedProject); assertEquals(ProjectInfo.State.ACTIVE, returnedProject.getState()); assertNotNull(returnedProject.getProjectNumber()); assertNotNull(returnedProject.getCreateTimeMillis()); assertSame(RESOURCE_MANAGER, returnedProject.getResourceManager()); } @Test public void testDelete() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); RESOURCE_MANAGER.delete(COMPLETE_PROJECT.getProjectId()); assertEquals( ProjectInfo.State.DELETE_REQUESTED, RESOURCE_MANAGER.get(COMPLETE_PROJECT.getProjectId()).getState()); try { RESOURCE_MANAGER.delete("some-nonexistant-project-id"); fail("Should fail because the project doesn't exist."); } catch (ResourceManagerException e) { assertEquals(403, e.getCode()); assertTrue(e.getMessage().contains("not found.")); } } @Test public void testGet() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); 
Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.getProjectId()); compareReadWriteFields(COMPLETE_PROJECT, returnedProject); assertEquals(RESOURCE_MANAGER, returnedProject.getResourceManager()); RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId()); assertNull(RESOURCE_MANAGER.get(COMPLETE_PROJECT.getProjectId())); } @Test public void testGetWithOptions() { Project originalProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.getProjectId(), GET_FIELDS); assertFalse(COMPLETE_PROJECT.equals(returnedProject)); assertEquals(COMPLETE_PROJECT.getProjectId(), returnedProject.getProjectId()); assertEquals(COMPLETE_PROJECT.getName(), returnedProject.getName()); assertEquals(originalProject.getCreateTimeMillis(), returnedProject.getCreateTimeMillis()); assertNull(returnedProject.getParent()); assertNull(returnedProject.getProjectNumber()); assertNull(returnedProject.getState()); assertTrue(returnedProject.getLabels().isEmpty()); assertEquals(RESOURCE_MANAGER, originalProject.getResourceManager()); assertEquals(RESOURCE_MANAGER, returnedProject.getResourceManager()); } @Test public void testList() { Page<Project> projects = RESOURCE_MANAGER.list(); assertFalse(projects.getValues().iterator().hasNext()); RESOURCE_MANAGER.create(PARTIAL_PROJECT); RESOURCE_MANAGER.create(COMPLETE_PROJECT); for (Project p : RESOURCE_MANAGER.list().getValues()) { if (p.getProjectId().equals(PARTIAL_PROJECT.getProjectId())) { compareReadWriteFields(PARTIAL_PROJECT, p); } else if (p.getProjectId().equals(COMPLETE_PROJECT.getProjectId())) { compareReadWriteFields(COMPLETE_PROJECT, p); } else { fail("Some unexpected project returned by list."); } assertSame(RESOURCE_MANAGER, p.getResourceManager()); } } @Test public void testListPaging() { RESOURCE_MANAGER.create(PARTIAL_PROJECT); RESOURCE_MANAGER.create(COMPLETE_PROJECT); Page<Project> page = RESOURCE_MANAGER.list(ProjectListOption.pageSize(1)); 
assertNotNull(page.getNextPageToken()); Iterator<Project> iterator = page.getValues().iterator(); compareReadWriteFields(COMPLETE_PROJECT, iterator.next()); assertFalse(iterator.hasNext()); page = page.getNextPage(); iterator = page.getValues().iterator(); compareReadWriteFields(PARTIAL_PROJECT, iterator.next()); assertFalse(iterator.hasNext()); assertNull(page.getNextPageToken()); } @Test public void testListFieldOptions() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); Page<Project> projects = RESOURCE_MANAGER.list(LIST_FIELDS); Project returnedProject = projects.iterateAll().iterator().next(); assertEquals(COMPLETE_PROJECT.getProjectId(), returnedProject.getProjectId()); assertEquals(COMPLETE_PROJECT.getName(), returnedProject.getName()); assertEquals(COMPLETE_PROJECT.getLabels(), returnedProject.getLabels()); assertNull(returnedProject.getParent()); assertNull(returnedProject.getProjectNumber()); assertNull(returnedProject.getState()); assertNull(returnedProject.getCreateTimeMillis()); assertSame(RESOURCE_MANAGER, returnedProject.getResourceManager()); } @Test public void testListPagingWithFieldOptions() { RESOURCE_MANAGER.create(PARTIAL_PROJECT); RESOURCE_MANAGER.create(COMPLETE_PROJECT); Page<Project> projects = RESOURCE_MANAGER.list(LIST_FIELDS, ProjectListOption.pageSize(1)); assertNotNull(projects.getNextPageToken()); Iterator<Project> iterator = projects.getValues().iterator(); Project returnedProject = iterator.next(); assertEquals(COMPLETE_PROJECT.getProjectId(), returnedProject.getProjectId()); assertEquals(COMPLETE_PROJECT.getName(), returnedProject.getName()); assertEquals(COMPLETE_PROJECT.getLabels(), returnedProject.getLabels()); assertNull(returnedProject.getParent()); assertNull(returnedProject.getProjectNumber()); assertNull(returnedProject.getState()); assertNull(returnedProject.getCreateTimeMillis()); assertSame(RESOURCE_MANAGER, returnedProject.getResourceManager()); assertFalse(iterator.hasNext()); projects = projects.getNextPage(); iterator = 
projects.getValues().iterator(); returnedProject = iterator.next(); assertEquals(PARTIAL_PROJECT.getProjectId(), returnedProject.getProjectId()); assertEquals(PARTIAL_PROJECT.getName(), returnedProject.getName()); assertEquals(PARTIAL_PROJECT.getLabels(), returnedProject.getLabels()); assertNull(returnedProject.getParent()); assertNull(returnedProject.getProjectNumber()); assertNull(returnedProject.getState()); assertNull(returnedProject.getCreateTimeMillis()); assertSame(RESOURCE_MANAGER, returnedProject.getResourceManager()); assertFalse(iterator.hasNext()); assertNull(projects.getNextPageToken()); } @Test public void testListFilterOptions() { ProjectInfo matchingProject = ProjectInfo.newBuilder("matching-project") .setName("MyProject") .setLabels(ImmutableMap.of("color", "blue", "size", "big")) .build(); ProjectInfo nonMatchingProject1 = ProjectInfo.newBuilder("non-matching-project1") .setName("myProject") .setLabels(ImmutableMap.of("color", "blue")) .build(); ProjectInfo nonMatchingProject2 = ProjectInfo.newBuilder("non-matching-project2") .setName("myProj") .setLabels(ImmutableMap.of("color", "blue", "size", "big")) .build(); ProjectInfo nonMatchingProject3 = ProjectInfo.newBuilder("non-matching-project3").build(); RESOURCE_MANAGER.create(matchingProject); RESOURCE_MANAGER.create(nonMatchingProject1); RESOURCE_MANAGER.create(nonMatchingProject2); RESOURCE_MANAGER.create(nonMatchingProject3); for (Project p : RESOURCE_MANAGER.list(LIST_FILTER).getValues()) { assertFalse(p.equals(nonMatchingProject1)); assertFalse(p.equals(nonMatchingProject2)); compareReadWriteFields(matchingProject, p); assertSame(RESOURCE_MANAGER, p.getResourceManager()); } } @Test public void testReplace() { ProjectInfo createdProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); Map<String, String> newLabels = ImmutableMap.of("new k1", "new v1"); ProjectInfo anotherCompleteProject = ProjectInfo.newBuilder(COMPLETE_PROJECT.getProjectId()) .setLabels(newLabels) .setProjectNumber(987654321L) 
.setCreateTimeMillis(230682061315L) .setState(ProjectInfo.State.DELETE_REQUESTED) .setParent(createdProject.getParent()) .build(); Project returnedProject = RESOURCE_MANAGER.replace(anotherCompleteProject); compareReadWriteFields(anotherCompleteProject, returnedProject); assertEquals(createdProject.getProjectNumber(), returnedProject.getProjectNumber()); assertEquals(createdProject.getCreateTimeMillis(), returnedProject.getCreateTimeMillis()); assertEquals(createdProject.getState(), returnedProject.getState()); assertEquals(RESOURCE_MANAGER, returnedProject.getResourceManager()); ProjectInfo nonexistantProject = ProjectInfo.newBuilder("some-project-id-that-does-not-exist").build(); try { RESOURCE_MANAGER.replace(nonexistantProject); fail("Should fail because the project doesn't exist."); } catch (ResourceManagerException e) { assertEquals(403, e.getCode()); assertTrue(e.getMessage().contains("the project was not found")); } } @Test public void testUndelete() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); RESOURCE_MANAGER.delete(COMPLETE_PROJECT.getProjectId()); assertEquals( ProjectInfo.State.DELETE_REQUESTED, RESOURCE_MANAGER.get(COMPLETE_PROJECT.getProjectId()).getState()); RESOURCE_MANAGER.undelete(COMPLETE_PROJECT.getProjectId()); ProjectInfo revivedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.getProjectId()); compareReadWriteFields(COMPLETE_PROJECT, revivedProject); assertEquals(ProjectInfo.State.ACTIVE, revivedProject.getState()); try { RESOURCE_MANAGER.undelete("invalid-project-id"); fail("Should fail because the project doesn't exist."); } catch (ResourceManagerException e) { assertEquals(403, e.getCode()); assertTrue(e.getMessage().contains("the project was not found")); } } @Test public void testGetPolicy() { assertNull(RESOURCE_MANAGER.getPolicy(COMPLETE_PROJECT.getProjectId())); RESOURCE_MANAGER.create(COMPLETE_PROJECT); RESOURCE_MANAGER.replacePolicy(COMPLETE_PROJECT.getProjectId(), POLICY); Policy retrieved = 
RESOURCE_MANAGER.getPolicy(COMPLETE_PROJECT.getProjectId()); assertEquals(POLICY.getBindings(), retrieved.getBindings()); assertNotNull(retrieved.getEtag()); assertEquals(0, retrieved.getVersion()); } @Test public void testReplacePolicy() { try { RESOURCE_MANAGER.replacePolicy("nonexistent-project", POLICY); fail("Project doesn't exist."); } catch (ResourceManagerException e) { assertEquals(403, e.getCode()); assertTrue(e.getMessage().endsWith("project was not found.")); } RESOURCE_MANAGER.create(PARTIAL_PROJECT); Policy oldPolicy = RESOURCE_MANAGER.getPolicy(PARTIAL_PROJECT.getProjectId()); RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.getProjectId(), POLICY); try { RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.getProjectId(), oldPolicy); fail("Policy with an invalid etag didn't cause error."); } catch (ResourceManagerException e) { assertEquals(409, e.getCode()); assertTrue(e.getMessage().contains("Policy etag mismatch")); } String originalEtag = RESOURCE_MANAGER.getPolicy(PARTIAL_PROJECT.getProjectId()).getEtag(); Policy newPolicy = RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.getProjectId(), POLICY); assertEquals(POLICY.getBindings(), newPolicy.getBindings()); assertNotNull(newPolicy.getEtag()); assertNotEquals(originalEtag, newPolicy.getEtag()); } @Test public void testTestPermissions() { List<String> permissions = ImmutableList.of("resourcemanager.projects.get"); try { RESOURCE_MANAGER.testPermissions("nonexistent-project", permissions); fail("Nonexistent project"); } catch (ResourceManagerException e) { assertEquals(403, e.getCode()); assertEquals("Project nonexistent-project not found.", e.getMessage()); } RESOURCE_MANAGER.create(PARTIAL_PROJECT); assertEquals( ImmutableList.of(true), RESOURCE_MANAGER.testPermissions(PARTIAL_PROJECT.getProjectId(), permissions)); } @Test public void testRetryableException() { when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class))) .thenReturn(resourceManagerRpcMock); ResourceManager resourceManager = 
ResourceManagerOptions.newBuilder() .setServiceRpcFactory(rpcFactoryMock) .build() .getService(); String exceptionMessage = "Internal Error"; doThrow(new ResourceManagerException(500, exceptionMessage)) .when(resourceManagerRpcMock) .get(PARTIAL_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS); try { resourceManager.get(PARTIAL_PROJECT.getProjectId()); } catch (ResourceManagerException expected) { assertEquals(500, expected.getCode()); assertEquals(exceptionMessage, expected.getMessage()); } } @Test public void testNonRetryableException() { when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class))) .thenReturn(resourceManagerRpcMock); ResourceManager resourceManager = ResourceManagerOptions.newBuilder() .setServiceRpcFactory(rpcFactoryMock) .build() .getService(); String exceptionMessage = "Project " + PARTIAL_PROJECT.getProjectId() + " not found."; doThrow(new ResourceManagerException(404, exceptionMessage)) .when(resourceManagerRpcMock) .get(PARTIAL_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS); try { resourceManager.get(PARTIAL_PROJECT.getProjectId()); } catch (ResourceManagerException expected) { assertEquals(404, expected.getCode()); assertEquals(exceptionMessage, expected.getMessage()); } } @Test public void testRuntimeException() { when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class))) .thenReturn(resourceManagerRpcMock); ResourceManager resourceManager = ResourceManagerOptions.newBuilder() .setServiceRpcFactory(rpcFactoryMock) .build() .getService(); String exceptionMessage = "Artificial runtime exception"; doThrow(new RuntimeException(exceptionMessage)) .when(resourceManagerRpcMock) .get(PARTIAL_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS); try { resourceManager.get(PARTIAL_PROJECT.getProjectId()); } catch (RuntimeException expected) { assertTrue(expected.getMessage().contains(exceptionMessage)); } } @Test public void testTestOrgPermissions() throws IOException { String organization = "organization/12345"; List<String> permissions = 
ImmutableList.of(
    "resourcemanager.organizations.get", "resourcemanager.organizations.getIamPolicy");
// Continuation of testTestOrgPermissions(), begun above this excerpt: the RPC is
// stubbed to answer the permission map, and the service must pass it through unchanged.
Map<String, Boolean> expected =
    ImmutableMap.of(
        "resourcemanager.organizations.get", true,
        "resourcemanager.organizations.getIamPolicy", false);
when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
    .thenReturn(resourceManagerRpcMock);
ResourceManager resourceManager =
    ResourceManagerOptions.newBuilder()
        .setServiceRpcFactory(rpcFactoryMock)
        .build()
        .getService();
when(resourceManagerRpcMock.testOrgPermissions(organization, permissions)).thenReturn(expected);
Map<String, Boolean> actual = resourceManager.testOrgPermissions(organization, permissions);
assertEquals(expected, actual);
// The RPC must have been hit exactly once with the same arguments.
verify(resourceManagerRpcMock).testOrgPermissions(organization, permissions);
}

/** Verifies a 404 from the org-permissions RPC propagates its code and message unchanged. */
@Test
public void testTestOrgPermissionsWithResourceManagerException() throws IOException {
  String organization = "organizations/12345";
  String exceptionMessage = "Not Found";
  List<String> permissions =
      ImmutableList.of(
          "resourcemanager.organizations.get", "resourcemanager.organizations.getIamPolicy");
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  doThrow(new ResourceManagerException(404, exceptionMessage))
      .when(resourceManagerRpcMock)
      .testOrgPermissions(organization, permissions);
  // NOTE(review): passes silently if no exception is thrown; consider fail(...).
  try {
    resourceManager.testOrgPermissions(organization, permissions);
  } catch (ResourceManagerException expected) {
    assertEquals(404, expected.getCode());
    assertEquals(exceptionMessage, expected.getMessage());
  }
}

/** Verifies clearOrgPolicy() forwards the converted protobuf to the RPC exactly once. */
@Test
public void testClearOrgPolicy() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  doNothing().when(resourceManagerRpcMock).clearOrgPolicy(RESOURCE, ORG_POLICY_INFO.toProtobuf());
  resourceManager.clearOrgPolicy(RESOURCE, ORG_POLICY_INFO);
  verify(resourceManagerRpcMock).clearOrgPolicy(RESOURCE, ORG_POLICY_INFO.toProtobuf());
}

/** Verifies a 404 from the clear-org-policy RPC propagates code and message unchanged. */
@Test
public void testClearOrgPolicyWithResourceManagerException() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  String exceptionMessage = "Should fail because the organization policy doesn't exist.";
  doThrow(new ResourceManagerException(404, exceptionMessage))
      .when(resourceManagerRpcMock)
      .clearOrgPolicy(RESOURCE, ORG_POLICY_INFO.toProtobuf());
  // NOTE(review): passes silently if no exception is thrown; consider fail(...).
  try {
    resourceManager.clearOrgPolicy(RESOURCE, ORG_POLICY_INFO);
  } catch (ResourceManagerException expected) {
    assertEquals(404, expected.getCode());
    assertEquals(exceptionMessage, expected.getMessage());
  }
}

/**
 * Verifies getEffectiveOrgPolicy() round-trips every field of the stubbed protobuf
 * (constraint, bool policy, etag, list policy, update time, version).
 */
@Test
public void testGetEffectiveOrgPolicy() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  when(resourceManagerRpcMock.getEffectiveOrgPolicy(RESOURCE, CONSTRAINTS))
      .thenReturn(ORG_POLICY_INFO.toProtobuf());
  OrgPolicyInfo policyInfo = resourceManager.getEffectiveOrgPolicy(RESOURCE, CONSTRAINTS);
  assertEquals(CONSTRAINTS, policyInfo.getConstraint());
  assertEquals(BOOLEAN_POLICY, policyInfo.getBoolPolicy());
  assertEquals(ETAG, policyInfo.getEtag());
  assertEquals(LIST_POLICY, policyInfo.getPolicies());
  assertEquals(UPDATE_TIME, policyInfo.getUpdateTime());
  assertEquals(VERSION, policyInfo.getVersion());
  verify(resourceManagerRpcMock).getEffectiveOrgPolicy(RESOURCE, CONSTRAINTS);
}

/**
 * Verifies a 404 from the effective-org-policy RPC propagates unchanged; the body of
 * this method starts here and its final `try` continues below this excerpt.
 */
@Test
public void testGetEffectiveOrgPolicyWithResourceManagerException()
    throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  String exceptionMessage = "Not Found";
  doThrow(new ResourceManagerException(404, exceptionMessage))
      .when(resourceManagerRpcMock)
      .getEffectiveOrgPolicy(RESOURCE, CONSTRAINTS);
  // NOTE(review): passes silently if no exception is thrown; consider fail(...).
  try {
    resourceManager.getEffectiveOrgPolicy(RESOURCE, CONSTRAINTS);
  } catch (ResourceManagerException expected) {
    assertEquals(404, expected.getCode());
    assertEquals(exceptionMessage, expected.getMessage());
  }
}

/**
 * Verifies getOrgPolicy() round-trips the stubbed protobuf.
 * NOTE(review): unlike testGetEffectiveOrgPolicy, the ETAG field is not asserted here —
 * confirm whether that omission is intentional.
 */
@Test
public void testGetOrgPolicy() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  when(resourceManagerRpcMock.getOrgPolicy(RESOURCE, CONSTRAINTS))
      .thenReturn(ORG_POLICY_INFO.toProtobuf());
  OrgPolicyInfo policyInfo = resourceManager.getOrgPolicy(RESOURCE, CONSTRAINTS);
  assertEquals(CONSTRAINTS, policyInfo.getConstraint());
  assertEquals(BOOLEAN_POLICY, policyInfo.getBoolPolicy());
  assertEquals(LIST_POLICY, policyInfo.getPolicies());
  assertEquals(UPDATE_TIME, policyInfo.getUpdateTime());
  assertEquals(VERSION, policyInfo.getVersion());
  verify(resourceManagerRpcMock).getOrgPolicy(RESOURCE, CONSTRAINTS);
}

/**
 * Verifies a 404 from the get-org-policy RPC propagates unchanged; the `try` body
 * continues below this excerpt.
 */
@Test
public void testGetOrgPolicyWithResourceManagerException() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  String exceptionMessage = "Not Found";
  doThrow(new ResourceManagerException(404, exceptionMessage))
      .when(resourceManagerRpcMock)
      .getOrgPolicy(RESOURCE, CONSTRAINTS);
  try {
resourceManager.getOrgPolicy(RESOURCE, CONSTRAINTS);
// Continuation of testGetOrgPolicyWithResourceManagerException, begun above this excerpt.
} catch (ResourceManagerException expected) {
  assertEquals(404, expected.getCode());
  assertEquals(exceptionMessage, expected.getMessage());
}
}

/**
 * Verifies listAvailableOrgPolicyConstraints() pages through the stubbed ListResult and
 * round-trips every ConstraintInfo field.
 */
@Test
public void testListAvailableOrgPolicyConstraints() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  ListResult<Constraint> expectedResult =
      ListResult.of(CURSOR, ImmutableList.of(CONSTRAINT_INFO.toProtobuf()));
  when(resourceManagerRpcMock.listAvailableOrgPolicyConstraints(NAME, EMPTY_RPC_OPTIONS))
      .thenReturn(expectedResult);
  Page<ConstraintInfo> page = resourceManager.listAvailableOrgPolicyConstraints(NAME);
  assertEquals(CURSOR, page.getNextPageToken());
  for (ConstraintInfo constraintInfo : page.getValues()) {
    assertEquals(NAME, constraintInfo.getName());
    assertEquals(CONSTRAINT_DEFAULT, constraintInfo.getConstraintDefault());
    assertEquals(DISPLAY_NAME, constraintInfo.getDisplayName());
    assertEquals(DESCRIPTION, constraintInfo.getDescription());
    assertEquals(LIST_CONSTRAINT, constraintInfo.getConstraints());
    assertEquals(VERSION, constraintInfo.getVersion());
  }
  verify(resourceManagerRpcMock).listAvailableOrgPolicyConstraints(NAME, EMPTY_RPC_OPTIONS);
}

/**
 * Verifies a 404 from the list-constraints RPC propagates unchanged; its catch clause
 * continues below this excerpt.
 * NOTE(review): method name lacks the "test" prefix used by its siblings (still run via
 * the JUnit4 annotation) — consider renaming for consistency.
 */
@Test
public void listAvailableOrgPolicyConstraintsWithResourceManagerException() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  String exceptionMessage = "Not Found";
  doThrow(new ResourceManagerException(404, exceptionMessage))
      .when(resourceManagerRpcMock)
      .listAvailableOrgPolicyConstraints(RESOURCE, EMPTY_RPC_OPTIONS);
  // NOTE(review): passes silently if no exception is thrown; consider fail(...).
  try {
    resourceManager.listAvailableOrgPolicyConstraints(RESOURCE);
  } catch (ResourceManagerException expected) {
    assertEquals(404, expected.getCode());
    assertEquals(exceptionMessage, expected.getMessage());
  }
}

/**
 * Verifies listOrgPolicies() pages through the stubbed ListResult and round-trips every
 * OrgPolicyInfo field (including the etag).
 */
@Test
public void testListOrgPolicies() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  ListResult<OrgPolicy> expectedResult =
      ListResult.of(CURSOR, ImmutableList.of(ORG_POLICY_INFO.toProtobuf()));
  when(resourceManagerRpcMock.listOrgPolicies(RESOURCE, EMPTY_RPC_OPTIONS))
      .thenReturn(expectedResult);
  Page<OrgPolicyInfo> policies = resourceManager.listOrgPolicies(RESOURCE);
  assertEquals(CURSOR, policies.getNextPageToken());
  for (OrgPolicyInfo orgPolicyInfo : policies.getValues()) {
    assertEquals(CONSTRAINTS, orgPolicyInfo.getConstraint());
    assertEquals(ETAG, orgPolicyInfo.getEtag());
    assertEquals(BOOLEAN_POLICY, orgPolicyInfo.getBoolPolicy());
    assertEquals(LIST_POLICY, orgPolicyInfo.getPolicies());
    assertEquals(UPDATE_TIME, orgPolicyInfo.getUpdateTime());
    assertEquals(VERSION, orgPolicyInfo.getVersion());
  }
  verify(resourceManagerRpcMock).listOrgPolicies(RESOURCE, EMPTY_RPC_OPTIONS);
}

/** Verifies a 404 from the list-org-policies RPC propagates code and message unchanged. */
@Test
public void testListOrgPoliciesWithResourceManagerException() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  String exceptionMessage = "Not Found";
  doThrow(new ResourceManagerException(404, exceptionMessage))
      .when(resourceManagerRpcMock)
      .listOrgPolicies(RESOURCE, EMPTY_RPC_OPTIONS);
  // NOTE(review): passes silently if no exception is thrown; consider fail(...).
  try {
    resourceManager.listOrgPolicies(RESOURCE);
  } catch (ResourceManagerException expected) {
    assertEquals(404, expected.getCode());
    assertEquals(exceptionMessage, expected.getMessage());
  }
}

/**
 * Verifies replaceOrgPolicy() (named "set" here) round-trips the stubbed protobuf.
 * NOTE(review): the ETAG field is not asserted, unlike testListOrgPolicies — confirm
 * whether the omission is intentional.
 */
@Test
public void testSetOrgPolicy() throws IOException
{
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  when(resourceManagerRpcMock.replaceOrgPolicy(RESOURCE, ORG_POLICY_INFO.toProtobuf()))
      .thenReturn(ORG_POLICY_INFO.toProtobuf());
  OrgPolicyInfo policyInfo = resourceManager.replaceOrgPolicy(RESOURCE, ORG_POLICY_INFO);
  assertEquals(CONSTRAINTS, policyInfo.getConstraint());
  assertEquals(BOOLEAN_POLICY, policyInfo.getBoolPolicy());
  assertEquals(LIST_POLICY, policyInfo.getPolicies());
  assertEquals(UPDATE_TIME, policyInfo.getUpdateTime());
  assertEquals(VERSION, policyInfo.getVersion());
  verify(resourceManagerRpcMock).replaceOrgPolicy(RESOURCE, ORG_POLICY_INFO.toProtobuf());
}

/** Verifies a 404 from the replace-org-policy RPC propagates code and message unchanged. */
@Test
public void testSetOrgPolicyWithResourceManagerException() throws IOException {
  when(rpcFactoryMock.create(Mockito.any(ResourceManagerOptions.class)))
      .thenReturn(resourceManagerRpcMock);
  ResourceManager resourceManager =
      ResourceManagerOptions.newBuilder()
          .setServiceRpcFactory(rpcFactoryMock)
          .build()
          .getService();
  String exceptionMessage = "Not Found";
  doThrow(new ResourceManagerException(404, exceptionMessage))
      .when(resourceManagerRpcMock)
      .replaceOrgPolicy(RESOURCE, ORG_POLICY_INFO.toProtobuf());
  // NOTE(review): passes silently if no exception is thrown; consider fail(...).
  try {
    resourceManager.replaceOrgPolicy(RESOURCE, ORG_POLICY_INFO);
  } catch (ResourceManagerException expected) {
    assertEquals(404, expected.getCode());
    assertEquals(exceptionMessage, expected.getMessage());
  }
}
}
googleapis/google-cloud-java
35,031
java-devicestreaming/proto-google-cloud-devicestreaming-v1/src/main/java/com/google/cloud/devicestreaming/v1/ListDeviceSessionsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/devicestreaming/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.devicestreaming.v1; /** * * * <pre> * Request message for DirectAccessService.ListDeviceSessions. * </pre> * * Protobuf type {@code google.cloud.devicestreaming.v1.ListDeviceSessionsRequest} */ public final class ListDeviceSessionsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.devicestreaming.v1.ListDeviceSessionsRequest) ListDeviceSessionsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListDeviceSessionsRequest.newBuilder() to construct. 
private ListDeviceSessionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDeviceSessionsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDeviceSessionsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.devicestreaming.v1.ServiceProto .internal_static_google_cloud_devicestreaming_v1_ListDeviceSessionsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.devicestreaming.v1.ServiceProto .internal_static_google_cloud_devicestreaming_v1_ListDeviceSessionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest.class, com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent to request, e.g. "projects/{project_id}" * </pre> * * <code> * string parent = 4 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The name of the parent to request, e.g. "projects/{project_id}" * </pre> * * <code> * string parent = 4 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 1; private int pageSize_ = 0; /** * * * <pre> * Optional. The maximum number of DeviceSessions to return. * </pre> * * <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A continuation token for paging. * </pre> * * <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A continuation token for paging. * </pre> * * <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. 
If specified, responses will be filtered by the given filter. * Allowed fields are: session_state. * </pre> * * <code>string filter = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. If specified, responses will be filtered by the given filter. * Allowed fields are: session_state. * </pre> * * <code>string filter = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (pageSize_ != 0) { output.writeInt32(1, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, parent_); } 
getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, parent_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest)) { return super.equals(obj); } com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest other = (com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = 
(29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for DirectAccessService.ListDeviceSessions. 
* </pre> * * Protobuf type {@code google.cloud.devicestreaming.v1.ListDeviceSessionsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.devicestreaming.v1.ListDeviceSessionsRequest) com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.devicestreaming.v1.ServiceProto .internal_static_google_cloud_devicestreaming_v1_ListDeviceSessionsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.devicestreaming.v1.ServiceProto .internal_static_google_cloud_devicestreaming_v1_ListDeviceSessionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest.class, com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest.Builder.class); } // Construct using com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.devicestreaming.v1.ServiceProto .internal_static_google_cloud_devicestreaming_v1_ListDeviceSessionsRequest_descriptor; } @java.lang.Override public com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest getDefaultInstanceForType() { return com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest build() { 
com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest buildPartial() { com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest result = new com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest) { return mergeFrom((com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest other) { if (other == com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 8 case 18: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 18 case 26: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 26 case 34: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // 
finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent to request, e.g. "projects/{project_id}" * </pre> * * <code> * string parent = 4 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the parent to request, e.g. "projects/{project_id}" * </pre> * * <code> * string parent = 4 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the parent to request, e.g. "projects/{project_id}" * </pre> * * <code> * string parent = 4 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the parent to request, e.g. "projects/{project_id}" * </pre> * * <code> * string parent = 4 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the parent to request, e.g. "projects/{project_id}" * </pre> * * <code> * string parent = 4 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The maximum number of DeviceSessions to return. * </pre> * * <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The maximum number of DeviceSessions to return. * </pre> * * <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The maximum number of DeviceSessions to return. * </pre> * * <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A continuation token for paging. * </pre> * * <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A continuation token for paging. * </pre> * * <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A continuation token for paging. * </pre> * * <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. A continuation token for paging. * </pre> * * <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. A continuation token for paging. * </pre> * * <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. If specified, responses will be filtered by the given filter. * Allowed fields are: session_state. * </pre> * * <code>string filter = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. If specified, responses will be filtered by the given filter. * Allowed fields are: session_state. * </pre> * * <code>string filter = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. If specified, responses will be filtered by the given filter. * Allowed fields are: session_state. * </pre> * * <code>string filter = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. If specified, responses will be filtered by the given filter. * Allowed fields are: session_state. 
* </pre> * * <code>string filter = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. If specified, responses will be filtered by the given filter. * Allowed fields are: session_state. * </pre> * * <code>string filter = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.devicestreaming.v1.ListDeviceSessionsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.devicestreaming.v1.ListDeviceSessionsRequest) private static final com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest(); } public static com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDeviceSessionsRequest> PARSER = new com.google.protobuf.AbstractParser<ListDeviceSessionsRequest>() { @java.lang.Override public ListDeviceSessionsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListDeviceSessionsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDeviceSessionsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.devicestreaming.v1.ListDeviceSessionsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,032
java-dialogflow-cx/google-cloud-dialogflow-cx/src/test/java/com/google/cloud/dialogflow/cx/v3/ExperimentsClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.cx.v3; import static com.google.cloud.dialogflow.cx.v3.ExperimentsClient.ListExperimentsPagedResponse; import static com.google.cloud.dialogflow.cx.v3.ExperimentsClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; import com.google.protobuf.Duration; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; 
import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class ExperimentsClientTest { private static MockExperiments mockExperiments; private static MockLocations mockLocations; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private ExperimentsClient client; @BeforeClass public static void startStaticServer() { mockExperiments = new MockExperiments(); mockLocations = new MockLocations(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockExperiments, mockLocations)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); ExperimentsSettings settings = ExperimentsSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = ExperimentsClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void listExperimentsTest() throws Exception { Experiment responsesElement = Experiment.newBuilder().build(); ListExperimentsResponse expectedResponse = ListExperimentsResponse.newBuilder() .setNextPageToken("") .addAllExperiments(Arrays.asList(responsesElement)) .build(); mockExperiments.addResponse(expectedResponse); EnvironmentName parent = EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]"); ListExperimentsPagedResponse pagedListResponse = client.listExperiments(parent); List<Experiment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getExperimentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); 
ListExperimentsRequest actualRequest = ((ListExperimentsRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listExperimentsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { EnvironmentName parent = EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]"); client.listExperiments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listExperimentsTest2() throws Exception { Experiment responsesElement = Experiment.newBuilder().build(); ListExperimentsResponse expectedResponse = ListExperimentsResponse.newBuilder() .setNextPageToken("") .addAllExperiments(Arrays.asList(responsesElement)) .build(); mockExperiments.addResponse(expectedResponse); String parent = "parent-995424086"; ListExperimentsPagedResponse pagedListResponse = client.listExperiments(parent); List<Experiment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getExperimentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListExperimentsRequest actualRequest = ((ListExperimentsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listExperimentsExceptionTest2() throws Exception { StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String parent = "parent-995424086"; client.listExperiments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getExperimentTest() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); Experiment actualResponse = client.getExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetExperimentRequest actualRequest = ((GetExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getExperimentExceptionTest() throws Exception { StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); client.getExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getExperimentTest2() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); String name = "name3373707"; Experiment actualResponse = client.getExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetExperimentRequest actualRequest = ((GetExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getExperimentExceptionTest2() throws Exception { StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String name = "name3373707"; client.getExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void createExperimentTest() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); EnvironmentName parent = EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]"); Experiment experiment = Experiment.newBuilder().build(); Experiment actualResponse = client.createExperiment(parent, experiment); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateExperimentRequest actualRequest = ((CreateExperimentRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(experiment, actualRequest.getExperiment()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test 
public void createExperimentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { EnvironmentName parent = EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]"); Experiment experiment = Experiment.newBuilder().build(); client.createExperiment(parent, experiment); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void createExperimentTest2() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); String parent = "parent-995424086"; Experiment experiment = Experiment.newBuilder().build(); Experiment actualResponse = client.createExperiment(parent, experiment); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateExperimentRequest actualRequest = ((CreateExperimentRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertEquals(experiment, actualRequest.getExperiment()); 
Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createExperimentExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String parent = "parent-995424086"; Experiment experiment = Experiment.newBuilder().build(); client.createExperiment(parent, experiment); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void updateExperimentTest() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); Experiment experiment = Experiment.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); Experiment actualResponse = client.updateExperiment(experiment, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateExperimentRequest actualRequest = ((UpdateExperimentRequest) 
actualRequests.get(0)); Assert.assertEquals(experiment, actualRequest.getExperiment()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateExperimentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { Experiment experiment = Experiment.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateExperiment(experiment, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void deleteExperimentTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockExperiments.addResponse(expectedResponse); ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); client.deleteExperiment(name); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteExperimentRequest actualRequest = ((DeleteExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteExperimentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); client.deleteExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void deleteExperimentTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockExperiments.addResponse(expectedResponse); String name = "name3373707"; client.deleteExperiment(name); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteExperimentRequest actualRequest = ((DeleteExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteExperimentExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String name = "name3373707"; client.deleteExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void startExperimentTest() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); Experiment actualResponse = client.startExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); StartExperimentRequest actualRequest = ((StartExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void startExperimentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); client.startExperiment(name); Assert.fail("No exception raised"); } catch 
(InvalidArgumentException e) { // Expected exception. } } @Test public void startExperimentTest2() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); String name = "name3373707"; Experiment actualResponse = client.startExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); StartExperimentRequest actualRequest = ((StartExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void startExperimentExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String name = "name3373707"; client.startExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void stopExperimentTest() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); Experiment actualResponse = client.stopExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); StopExperimentRequest actualRequest = ((StopExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void stopExperimentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); client.stopExperiment(name); Assert.fail("No exception raised"); } catch 
(InvalidArgumentException e) { // Expected exception. } } @Test public void stopExperimentTest2() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); String name = "name3373707"; Experiment actualResponse = client.stopExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); StopExperimentRequest actualRequest = ((StopExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void stopExperimentExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String name = "name3373707"; client.stopExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listLocationsTest() throws Exception { Location responsesElement = Location.newBuilder().build(); ListLocationsResponse expectedResponse = ListLocationsResponse.newBuilder() .setNextPageToken("") .addAllLocations(Arrays.asList(responsesElement)) .build(); mockLocations.addResponse(expectedResponse); ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("name3373707") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); ListLocationsPagedResponse pagedListResponse = client.listLocations(request); List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockLocations.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0)); Assert.assertEquals(request.getName(), actualRequest.getName()); Assert.assertEquals(request.getFilter(), actualRequest.getFilter()); Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize()); Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listLocationsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockLocations.addException(exception); try { ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("name3373707") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); client.listLocations(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getLocationTest() throws Exception { Location expectedResponse = Location.newBuilder() .setName("name3373707") .setLocationId("locationId1541836720") .setDisplayName("displayName1714148973") .putAllLabels(new HashMap<String, String>()) .setMetadata(Any.newBuilder().build()) .build(); mockLocations.addResponse(expectedResponse); GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); Location actualResponse = client.getLocation(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockLocations.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0)); Assert.assertEquals(request.getName(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getLocationExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockLocations.addException(exception); try { GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); client.getLocation(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
apache/hadoop
34,787
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.v2.app.webapp; import static org.apache.hadoop.yarn.webapp.WebServicesTestUtils.assertResponseStatusCode; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import java.io.StringReader; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.BadRequestException; import javax.ws.rs.NotFoundException; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Application; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskReport; import org.apache.hadoop.mapreduce.v2.app.AppContext; import 
org.apache.hadoop.mapreduce.v2.app.MockAppContext; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.Task; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.util.XMLUtils; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.JerseyTestBase; import org.apache.hadoop.yarn.webapp.WebServicesTestUtils; import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import org.junit.jupiter.api.Test; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.glassfish.jersey.internal.inject.AbstractBinder; import org.glassfish.jersey.jettison.JettisonFeature; import org.glassfish.jersey.server.ResourceConfig; /** * Test the app master web service Rest API for getting tasks, a specific task, * and task counters. * * /ws/v1/mapreduce/jobs/{jobid}/tasks * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid} * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/counters */ public class TestAMWebServicesTasks extends JerseyTestBase { private static final Configuration CONF = new Configuration(); private static AppContext appContext; @Override protected Application configure() { ResourceConfig config = new ResourceConfig(); config.register(new JerseyBinder()); config.register(AMWebServices.class); config.register(GenericExceptionHandler.class); config.register(new JettisonFeature()).register(JAXBContextResolver.class); return config; } private static class JerseyBinder extends AbstractBinder { @Override protected void configure() { appContext = new MockAppContext(0, 1, 2, 1); App app = new App(appContext); bind(appContext).to(AppContext.class).named("am"); bind(app).to(App.class).named("app"); bind(CONF).to(Configuration.class).named("conf"); final HttpServletResponse response = mock(HttpServletResponse.class); final 
HttpServletRequest request = mock(HttpServletRequest.class); bind(response).to(HttpServletResponse.class); bind(request).to(HttpServletRequest.class); } } public TestAMWebServicesTasks() { } @Test public void testTasks() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks") .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject tasks = json.getJSONObject("tasks"); JSONArray arr = tasks.getJSONArray("task"); assertEquals(2, arr.length(), "incorrect number of elements"); verifyAMTask(arr, jobsMap.get(id), null); } } @Test public void testTasksDefault() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").request().get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject tasks = json.getJSONObject("tasks"); JSONArray arr = tasks.getJSONArray("task"); assertEquals(2, arr.length(), "incorrect number of elements"); verifyAMTask(arr, jobsMap.get(id), null); } } @Test public void testTasksSlash() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { 
String jobId = MRApps.toString(id); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks/") .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject tasks = json.getJSONObject("tasks"); JSONArray arr = tasks.getJSONArray("task"); assertEquals(2, arr.length(), "incorrect number of elements"); verifyAMTask(arr, jobsMap.get(id), null); } } @Test public void testTasksXML() throws JSONException, Exception { WebTarget r = target(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks") .request(MediaType.APPLICATION_XML).get(Response.class); assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); String xml = response.readEntity(String.class); DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory(); DocumentBuilder db = dbf.newDocumentBuilder(); InputSource is = new InputSource(); is.setCharacterStream(new StringReader(xml)); Document dom = db.parse(is); NodeList tasks = dom.getElementsByTagName("tasks"); assertEquals(1, tasks.getLength(), "incorrect number of elements"); NodeList task = dom.getElementsByTagName("task"); verifyAMTaskXML(task, jobsMap.get(id)); } } @Test public void testTasksQueryMap() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); String type = "m"; Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").queryParam("type", type) 
.request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject tasks = json.getJSONObject("tasks"); JSONObject task = tasks.getJSONObject("task"); JSONArray arr = new JSONArray(); arr.put(task); assertEquals(1, arr.length(), "incorrect number of elements"); verifyAMTask(arr, jobsMap.get(id), type); } } @Test public void testTasksQueryReduce() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); String type = "r"; Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").queryParam("type", type) .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject tasks = json.getJSONObject("tasks"); JSONObject task = tasks.getJSONObject("task"); JSONArray arr = new JSONArray(); arr.put(task); assertEquals(1, arr.length(), "incorrect number of elements"); verifyAMTask(arr, jobsMap.get(id), type); } } @Test public void testTasksQueryInvalid() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); // tasktype must be exactly either "m" or "r" String tasktype = "reduce"; try { Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId) .path("tasks").queryParam("type", tasktype) .request(MediaType.APPLICATION_JSON).get(); throw new BadRequestException(response); } 
catch (BadRequestException ue) { Response response = ue.getResponse(); assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo()); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject msg = response.readEntity(JSONObject.class); JSONObject exception = msg.getJSONObject("RemoteException"); assertEquals(3, exception.length(), "incorrect number of elements"); String message = exception.getString("message"); String type = exception.getString("exception"); String classname = exception.getString("javaClassName"); WebServicesTestUtils.checkStringMatch("exception message", "tasktype must be either m or r", message); WebServicesTestUtils.checkStringMatch("exception type", "BadRequestException", type); WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.BadRequestException", classname); } } } @Test public void testTaskId() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid) .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject info = json.getJSONObject("task"); verifyAMSingleTask(info, task); } } } @Test public void testTaskIdSlash() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : 
jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid + "/") .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject info = json.getJSONObject("task"); verifyAMSingleTask(info, task); } } } @Test public void testTaskIdDefault() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid).request() .get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject info = json.getJSONObject("task"); verifyAMSingleTask(info, task); } } } @Test public void testTaskIdBogus() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); String tid = "bogustaskid"; try { Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId) .path("tasks").path(tid).request().get(); throw new NotFoundException(response); } catch (NotFoundException ue) { Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId) .path("tasks").path(tid).request().get(); 
assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo()); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject msg = response.readEntity(JSONObject.class); JSONObject exception = msg.getJSONObject("RemoteException"); assertEquals(3, exception.length(), "incorrect number of elements"); String message = exception.getString("message"); String type = exception.getString("exception"); String classname = exception.getString("javaClassName"); WebServicesTestUtils.checkStringEqual("exception message", "TaskId string : " + "bogustaskid is not properly formed" + "\nReason: java.util.regex.Matcher[pattern=" + TaskID.TASK_ID_REGEX + " region=0,11 lastmatch=]", message); WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type); WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.NotFoundException", classname); } } } @Test public void testTaskIdNonExist() throws JSONException, Exception { WebTarget r = target(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); String tid = "task_0_0000_m_000000"; try { Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId) .path("tasks").path(tid).request().get(); throw new NotFoundException(response); } catch (NotFoundException ue) { Response response = ue.getResponse(); assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo()); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); String entity = response.readEntity(String.class); JSONObject msg = new JSONObject(entity); JSONObject exception = msg.getJSONObject("RemoteException"); assertEquals(3, exception.length(), "incorrect number of elements"); String message = exception.getString("message"); String type = exception.getString("exception"); String classname = 
exception.getString("javaClassName"); WebServicesTestUtils.checkStringMatch("exception message", "task not found with id task_0_0000_m_000000", message); WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type); WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.NotFoundException", classname); } } } @Test public void testTaskIdInvalid() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); String tid = "task_0_0000_d_000000"; try { Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId) .path("tasks").path(tid).request().get(); throw new NotFoundException(response); } catch (NotFoundException ue) { Response response = ue.getResponse(); assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo()); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject msg = response.readEntity(JSONObject.class); JSONObject exception = msg.getJSONObject("RemoteException"); assertEquals(3, exception.length(), "incorrect number of elements"); String message = exception.getString("message"); String type = exception.getString("exception"); String classname = exception.getString("javaClassName"); WebServicesTestUtils.checkStringEqual("exception message", "TaskId string : " + "task_0_0000_d_000000 is not properly formed" + "\nReason: java.util.regex.Matcher[pattern=" + TaskID.TASK_ID_REGEX + " region=0,20 lastmatch=]", message); WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type); WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.NotFoundException", classname); } } } @Test public void testTaskIdInvalid2() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = 
appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); String tid = "task_0_m_000000"; try { Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId) .path("tasks").path(tid).request().get(); throw new NotFoundException(response); } catch (NotFoundException ue) { Response response = ue.getResponse(); assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo()); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject msg = response.readEntity(JSONObject.class); JSONObject exception = msg.getJSONObject("RemoteException"); assertEquals(3, exception.length(), "incorrect number of elements"); String message = exception.getString("message"); String type = exception.getString("exception"); String classname = exception.getString("javaClassName"); WebServicesTestUtils.checkStringEqual("exception message", "TaskId string : " + "task_0_m_000000 is not properly formed" + "\nReason: java.util.regex.Matcher[pattern=" + TaskID.TASK_ID_REGEX + " region=0,15 lastmatch=]", message); WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type); WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.NotFoundException", classname); } } } @Test public void testTaskIdInvalid3() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); String tid = "task_0_0000_m"; try { Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId) .path("tasks").path(tid).request().get(); throw new NotFoundException(response); } catch (NotFoundException ue) { Response response = ue.getResponse(); assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo()); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + 
JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject msg = response.readEntity(JSONObject.class); JSONObject exception = msg.getJSONObject("RemoteException"); assertEquals(3, exception.length(), "incorrect number of elements"); String message = exception.getString("message"); String type = exception.getString("exception"); String classname = exception.getString("javaClassName"); WebServicesTestUtils.checkStringEqual("exception message", "TaskId string : " + "task_0_0000_m is not properly formed" + "\nReason: java.util.regex.Matcher[pattern=" + TaskID.TASK_ID_REGEX + " region=0,13 lastmatch=]", message); WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type); WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.NotFoundException", classname); } } } @Test public void testTaskIdXML() throws Exception { WebTarget r = target(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid) .request(MediaType.APPLICATION_XML).get(Response.class); assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); String xml = response.readEntity(String.class); DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory(); DocumentBuilder db = dbf.newDocumentBuilder(); InputSource is = new InputSource(); is.setCharacterStream(new StringReader(xml)); Document dom = db.parse(is); NodeList nodes = dom.getElementsByTagName("task"); for (int i = 0; i < nodes.getLength(); i++) { Element element = (Element) nodes.item(i); verifyAMSingleTaskXML(element, task); } } } } public void verifyAMSingleTask(JSONObject info, Task task) throws JSONException { assertEquals(9, info.length(), 
"incorrect number of elements"); verifyTaskGeneric(task, info.getString("id"), info.getString("state"), info.getString("type"), info.getString("successfulAttempt"), info.getLong("startTime"), info.getLong("finishTime"), info.getLong("elapsedTime"), (float) info.getDouble("progress"), info.getString("status")); } public void verifyAMTask(JSONArray arr, Job job, String type) throws JSONException { for (Task task : job.getTasks().values()) { TaskId id = task.getID(); String tid = MRApps.toString(id); boolean found = false; if (type != null && task.getType() == MRApps.taskType(type)) { for (int i = 0; i < arr.length(); i++) { JSONObject info = arr.getJSONObject(i); if (tid.matches(info.getString("id"))) { found = true; verifyAMSingleTask(info, task); } } assertTrue(found, "task with id: " + tid + " not in web service output"); } } } public void verifyTaskGeneric(Task task, String id, String state, String type, String successfulAttempt, long startTime, long finishTime, long elapsedTime, float progress, String status) { TaskId taskid = task.getID(); String tid = MRApps.toString(taskid); TaskReport report = task.getReport(); WebServicesTestUtils.checkStringMatch("id", tid, id); WebServicesTestUtils.checkStringMatch("type", task.getType().toString(), type); WebServicesTestUtils.checkStringMatch("state", report.getTaskState() .toString(), state); // not easily checked without duplicating logic, just make sure its here assertNotNull(successfulAttempt, "successfulAttempt null"); assertEquals(report.getStartTime(), startTime, "startTime wrong"); assertEquals(report.getFinishTime(), finishTime, "finishTime wrong"); assertEquals(finishTime - startTime, elapsedTime, "elapsedTime wrong"); assertEquals(report.getProgress() * 100, progress, 1e-3f, "progress wrong"); assertEquals(report.getStatus(), status, "status wrong"); } public void verifyAMSingleTaskXML(Element element, Task task) { verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"), 
WebServicesTestUtils.getXmlString(element, "state"), WebServicesTestUtils.getXmlString(element, "type"), WebServicesTestUtils.getXmlString(element, "successfulAttempt"), WebServicesTestUtils.getXmlLong(element, "startTime"), WebServicesTestUtils.getXmlLong(element, "finishTime"), WebServicesTestUtils.getXmlLong(element, "elapsedTime"), WebServicesTestUtils.getXmlFloat(element, "progress"), WebServicesTestUtils.getXmlString(element, "status")); } public void verifyAMTaskXML(NodeList nodes, Job job) { assertEquals(2, nodes.getLength(), "incorrect number of elements"); for (Task task : job.getTasks().values()) { TaskId id = task.getID(); String tid = MRApps.toString(id); boolean found = false; for (int i = 0; i < nodes.getLength(); i++) { Element element = (Element) nodes.item(i); if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) { found = true; verifyAMSingleTaskXML(element, task); } } assertTrue(found, "task with id: " + tid + " not in web service output"); } } @Test public void testTaskIdCounters() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid).path("counters") .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject info = json.getJSONObject("jobTaskCounters"); verifyAMJobTaskCounters(info, task); } } } @Test public void testTaskIdCountersSlash() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = 
appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid).path("counters/") .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject info = json.getJSONObject("jobTaskCounters"); verifyAMJobTaskCounters(info, task); } } } @Test public void testTaskIdCountersDefault() throws JSONException, Exception { WebTarget r = targetWithJsonObject(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid).path("counters").request() .get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); assertEquals(1, json.length(), "incorrect number of elements"); JSONObject info = json.getJSONObject("jobTaskCounters"); verifyAMJobTaskCounters(info, task); } } } @Test public void testJobTaskCountersXML() throws Exception { WebTarget r = target(); Map<JobId, Job> jobsMap = appContext.getAllJobs(); for (JobId id : jobsMap.keySet()) { String jobId = MRApps.toString(id); for (Task task : jobsMap.get(id).getTasks().values()) { String tid = MRApps.toString(task.getID()); Response response = r.path("ws").path("v1").path("mapreduce") .path("jobs").path(jobId).path("tasks").path(tid).path("counters") 
.request(MediaType.APPLICATION_XML).get(Response.class); assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); String xml = response.readEntity(String.class); DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory(); DocumentBuilder db = dbf.newDocumentBuilder(); InputSource is = new InputSource(); is.setCharacterStream(new StringReader(xml)); Document dom = db.parse(is); NodeList info = dom.getElementsByTagName("jobTaskCounters"); verifyAMTaskCountersXML(info, task); } } } public void verifyAMJobTaskCounters(JSONObject info, Task task) throws JSONException { assertEquals(2, info.length(), "incorrect number of elements"); WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()), info.getString("id")); // just do simple verification of fields - not data is correct // in the fields JSONArray counterGroups = info.getJSONArray("taskCounterGroup"); for (int i = 0; i < counterGroups.length(); i++) { JSONObject counterGroup = counterGroups.getJSONObject(i); String name = counterGroup.getString("counterGroupName"); assertTrue((name != null && !name.isEmpty()), "name not set"); JSONArray counters = counterGroup.getJSONArray("counter"); for (int j = 0; j < counters.length(); j++) { JSONObject counter = counters.getJSONObject(j); String counterName = counter.getString("name"); assertTrue((counterName != null && !counterName.isEmpty()), "name not set"); long value = counter.getLong("value"); assertTrue(value >= 0, "value >= 0"); } } } public void verifyAMTaskCountersXML(NodeList nodes, Task task) { for (int i = 0; i < nodes.getLength(); i++) { Element element = (Element) nodes.item(i); WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()), WebServicesTestUtils.getXmlString(element, "id")); // just do simple verification of fields - not data is correct // in the fields NodeList groups = element.getElementsByTagName("taskCounterGroup"); for (int j = 0; j < groups.getLength(); 
j++) { Element counters = (Element) groups.item(j); assertNotNull(counters, "should have counters in the web service info"); String name = WebServicesTestUtils.getXmlString(counters, "counterGroupName"); assertTrue((name != null && !name.isEmpty()), "name not set"); NodeList counterArr = counters.getElementsByTagName("counter"); for (int z = 0; z < counterArr.getLength(); z++) { Element counter = (Element) counterArr.item(z); String counterName = WebServicesTestUtils.getXmlString(counter, "name"); assertTrue((counterName != null && !counterName.isEmpty()), "counter name not set"); long value = WebServicesTestUtils.getXmlLong(counter, "value"); assertTrue(value >= 0, "value not >= 0"); } } } } }
googleapis/google-cloud-java
35,076
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/ArtifactResult.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; /** * * * <pre> * An artifact that was uploaded during a build. This * is a single record in the artifact manifest JSON file. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.ArtifactResult} */ public final class ArtifactResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.ArtifactResult) ArtifactResultOrBuilder { private static final long serialVersionUID = 0L; // Use ArtifactResult.newBuilder() to construct. 
private ArtifactResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ArtifactResult() { location_ = ""; fileHash_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ArtifactResult(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ArtifactResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ArtifactResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.ArtifactResult.class, com.google.cloudbuild.v1.ArtifactResult.Builder.class); } public static final int LOCATION_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object location_ = ""; /** * * * <pre> * The path of an artifact in a Cloud Storage bucket, with the * generation number. For example, * `gs://mybucket/path/to/output.jar#generation`. * </pre> * * <code>string location = 1;</code> * * @return The location. */ @java.lang.Override public java.lang.String getLocation() { java.lang.Object ref = location_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); location_ = s; return s; } } /** * * * <pre> * The path of an artifact in a Cloud Storage bucket, with the * generation number. For example, * `gs://mybucket/path/to/output.jar#generation`. * </pre> * * <code>string location = 1;</code> * * @return The bytes for location. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLocationBytes() { java.lang.Object ref = location_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); location_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILE_HASH_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.cloudbuild.v1.FileHashes> fileHash_; /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ @java.lang.Override public java.util.List<com.google.cloudbuild.v1.FileHashes> getFileHashList() { return fileHash_; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloudbuild.v1.FileHashesOrBuilder> getFileHashOrBuilderList() { return fileHash_; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ @java.lang.Override public int getFileHashCount() { return fileHash_.size(); } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ @java.lang.Override public com.google.cloudbuild.v1.FileHashes getFileHash(int index) { return fileHash_.get(index); } /** * * * <pre> * The file hash of the artifact. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ @java.lang.Override public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashOrBuilder(int index) { return fileHash_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, location_); } for (int i = 0; i < fileHash_.size(); i++) { output.writeMessage(2, fileHash_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, location_); } for (int i = 0; i < fileHash_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, fileHash_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v1.ArtifactResult)) { return super.equals(obj); } com.google.cloudbuild.v1.ArtifactResult other = (com.google.cloudbuild.v1.ArtifactResult) obj; if (!getLocation().equals(other.getLocation())) return false; if (!getFileHashList().equals(other.getFileHashList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; 
hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + LOCATION_FIELD_NUMBER; hash = (53 * hash) + getLocation().hashCode(); if (getFileHashCount() > 0) { hash = (37 * hash) + FILE_HASH_FIELD_NUMBER; hash = (53 * hash) + getFileHashList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v1.ArtifactResult parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.ArtifactResult parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.ArtifactResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.ArtifactResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v1.ArtifactResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * An artifact that was uploaded during a build. This * is a single record in the artifact manifest JSON file. 
* </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.ArtifactResult} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.ArtifactResult) com.google.cloudbuild.v1.ArtifactResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ArtifactResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ArtifactResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.ArtifactResult.class, com.google.cloudbuild.v1.ArtifactResult.Builder.class); } // Construct using com.google.cloudbuild.v1.ArtifactResult.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; location_ = ""; if (fileHashBuilder_ == null) { fileHash_ = java.util.Collections.emptyList(); } else { fileHash_ = null; fileHashBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ArtifactResult_descriptor; } @java.lang.Override public com.google.cloudbuild.v1.ArtifactResult getDefaultInstanceForType() { return com.google.cloudbuild.v1.ArtifactResult.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v1.ArtifactResult build() { com.google.cloudbuild.v1.ArtifactResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } 
return result; } @java.lang.Override public com.google.cloudbuild.v1.ArtifactResult buildPartial() { com.google.cloudbuild.v1.ArtifactResult result = new com.google.cloudbuild.v1.ArtifactResult(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloudbuild.v1.ArtifactResult result) { if (fileHashBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { fileHash_ = java.util.Collections.unmodifiableList(fileHash_); bitField0_ = (bitField0_ & ~0x00000002); } result.fileHash_ = fileHash_; } else { result.fileHash_ = fileHashBuilder_.build(); } } private void buildPartial0(com.google.cloudbuild.v1.ArtifactResult result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.location_ = location_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v1.ArtifactResult) { return mergeFrom((com.google.cloudbuild.v1.ArtifactResult) other); } else { super.mergeFrom(other); return 
this; } } public Builder mergeFrom(com.google.cloudbuild.v1.ArtifactResult other) { if (other == com.google.cloudbuild.v1.ArtifactResult.getDefaultInstance()) return this; if (!other.getLocation().isEmpty()) { location_ = other.location_; bitField0_ |= 0x00000001; onChanged(); } if (fileHashBuilder_ == null) { if (!other.fileHash_.isEmpty()) { if (fileHash_.isEmpty()) { fileHash_ = other.fileHash_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFileHashIsMutable(); fileHash_.addAll(other.fileHash_); } onChanged(); } } else { if (!other.fileHash_.isEmpty()) { if (fileHashBuilder_.isEmpty()) { fileHashBuilder_.dispose(); fileHashBuilder_ = null; fileHash_ = other.fileHash_; bitField0_ = (bitField0_ & ~0x00000002); fileHashBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFileHashFieldBuilder() : null; } else { fileHashBuilder_.addAllMessages(other.fileHash_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { location_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.cloudbuild.v1.FileHashes m = input.readMessage( com.google.cloudbuild.v1.FileHashes.parser(), extensionRegistry); if (fileHashBuilder_ == null) { ensureFileHashIsMutable(); fileHash_.add(m); } else { fileHashBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object location_ = ""; /** * * * <pre> * The path of an artifact in a Cloud Storage bucket, with the * generation number. For example, * `gs://mybucket/path/to/output.jar#generation`. * </pre> * * <code>string location = 1;</code> * * @return The location. */ public java.lang.String getLocation() { java.lang.Object ref = location_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); location_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The path of an artifact in a Cloud Storage bucket, with the * generation number. For example, * `gs://mybucket/path/to/output.jar#generation`. * </pre> * * <code>string location = 1;</code> * * @return The bytes for location. */ public com.google.protobuf.ByteString getLocationBytes() { java.lang.Object ref = location_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); location_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The path of an artifact in a Cloud Storage bucket, with the * generation number. For example, * `gs://mybucket/path/to/output.jar#generation`. * </pre> * * <code>string location = 1;</code> * * @param value The location to set. * @return This builder for chaining. */ public Builder setLocation(java.lang.String value) { if (value == null) { throw new NullPointerException(); } location_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The path of an artifact in a Cloud Storage bucket, with the * generation number. For example, * `gs://mybucket/path/to/output.jar#generation`. 
* </pre> * * <code>string location = 1;</code> * * @return This builder for chaining. */ public Builder clearLocation() { location_ = getDefaultInstance().getLocation(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The path of an artifact in a Cloud Storage bucket, with the * generation number. For example, * `gs://mybucket/path/to/output.jar#generation`. * </pre> * * <code>string location = 1;</code> * * @param value The bytes for location to set. * @return This builder for chaining. */ public Builder setLocationBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); location_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.cloudbuild.v1.FileHashes> fileHash_ = java.util.Collections.emptyList(); private void ensureFileHashIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { fileHash_ = new java.util.ArrayList<com.google.cloudbuild.v1.FileHashes>(fileHash_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> fileHashBuilder_; /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public java.util.List<com.google.cloudbuild.v1.FileHashes> getFileHashList() { if (fileHashBuilder_ == null) { return java.util.Collections.unmodifiableList(fileHash_); } else { return fileHashBuilder_.getMessageList(); } } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public int getFileHashCount() { if (fileHashBuilder_ == null) { return fileHash_.size(); } else { return fileHashBuilder_.getCount(); } } /** * * * <pre> * The file hash of the artifact. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public com.google.cloudbuild.v1.FileHashes getFileHash(int index) { if (fileHashBuilder_ == null) { return fileHash_.get(index); } else { return fileHashBuilder_.getMessage(index); } } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder setFileHash(int index, com.google.cloudbuild.v1.FileHashes value) { if (fileHashBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFileHashIsMutable(); fileHash_.set(index, value); onChanged(); } else { fileHashBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder setFileHash( int index, com.google.cloudbuild.v1.FileHashes.Builder builderForValue) { if (fileHashBuilder_ == null) { ensureFileHashIsMutable(); fileHash_.set(index, builderForValue.build()); onChanged(); } else { fileHashBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder addFileHash(com.google.cloudbuild.v1.FileHashes value) { if (fileHashBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFileHashIsMutable(); fileHash_.add(value); onChanged(); } else { fileHashBuilder_.addMessage(value); } return this; } /** * * * <pre> * The file hash of the artifact. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder addFileHash(int index, com.google.cloudbuild.v1.FileHashes value) { if (fileHashBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFileHashIsMutable(); fileHash_.add(index, value); onChanged(); } else { fileHashBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder addFileHash(com.google.cloudbuild.v1.FileHashes.Builder builderForValue) { if (fileHashBuilder_ == null) { ensureFileHashIsMutable(); fileHash_.add(builderForValue.build()); onChanged(); } else { fileHashBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder addFileHash( int index, com.google.cloudbuild.v1.FileHashes.Builder builderForValue) { if (fileHashBuilder_ == null) { ensureFileHashIsMutable(); fileHash_.add(index, builderForValue.build()); onChanged(); } else { fileHashBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder addAllFileHash( java.lang.Iterable<? extends com.google.cloudbuild.v1.FileHashes> values) { if (fileHashBuilder_ == null) { ensureFileHashIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, fileHash_); onChanged(); } else { fileHashBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The file hash of the artifact. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder clearFileHash() { if (fileHashBuilder_ == null) { fileHash_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { fileHashBuilder_.clear(); } return this; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public Builder removeFileHash(int index) { if (fileHashBuilder_ == null) { ensureFileHashIsMutable(); fileHash_.remove(index); onChanged(); } else { fileHashBuilder_.remove(index); } return this; } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public com.google.cloudbuild.v1.FileHashes.Builder getFileHashBuilder(int index) { return getFileHashFieldBuilder().getBuilder(index); } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashOrBuilder(int index) { if (fileHashBuilder_ == null) { return fileHash_.get(index); } else { return fileHashBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public java.util.List<? extends com.google.cloudbuild.v1.FileHashesOrBuilder> getFileHashOrBuilderList() { if (fileHashBuilder_ != null) { return fileHashBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(fileHash_); } } /** * * * <pre> * The file hash of the artifact. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public com.google.cloudbuild.v1.FileHashes.Builder addFileHashBuilder() { return getFileHashFieldBuilder() .addBuilder(com.google.cloudbuild.v1.FileHashes.getDefaultInstance()); } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public com.google.cloudbuild.v1.FileHashes.Builder addFileHashBuilder(int index) { return getFileHashFieldBuilder() .addBuilder(index, com.google.cloudbuild.v1.FileHashes.getDefaultInstance()); } /** * * * <pre> * The file hash of the artifact. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.FileHashes file_hash = 2;</code> */ public java.util.List<com.google.cloudbuild.v1.FileHashes.Builder> getFileHashBuilderList() { return getFileHashFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> getFileHashFieldBuilder() { if (fileHashBuilder_ == null) { fileHashBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder>( fileHash_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); fileHash_ = null; } return fileHashBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.ArtifactResult) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ArtifactResult) private static 
final com.google.cloudbuild.v1.ArtifactResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v1.ArtifactResult(); } public static com.google.cloudbuild.v1.ArtifactResult getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ArtifactResult> PARSER = new com.google.protobuf.AbstractParser<ArtifactResult>() { @java.lang.Override public ArtifactResult parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ArtifactResult> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ArtifactResult> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v1.ArtifactResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/incubator-retired-htrace
35,133
htrace-zipkin/src/main/java/com/twitter/zipkin/gen/Span.java
/** * Autogenerated by Thrift Compiler (0.9.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package com.twitter.zipkin.gen; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Span implements org.apache.thrift.TBase<Span, Span._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Span"); private static final org.apache.thrift.protocol.TField TRACE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("trace_id", org.apache.thrift.protocol.TType.I64, (short)1); private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I64, (short)4); private static final org.apache.thrift.protocol.TField PARENT_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("parent_id", org.apache.thrift.protocol.TType.I64, (short)5); private static final org.apache.thrift.protocol.TField ANNOTATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("annotations", org.apache.thrift.protocol.TType.LIST, (short)6); private 
static final org.apache.thrift.protocol.TField BINARY_ANNOTATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("binary_annotations", org.apache.thrift.protocol.TType.LIST, (short)8); private static final org.apache.thrift.protocol.TField DEBUG_FIELD_DESC = new org.apache.thrift.protocol.TField("debug", org.apache.thrift.protocol.TType.BOOL, (short)9); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new SpanStandardSchemeFactory()); schemes.put(TupleScheme.class, new SpanTupleSchemeFactory()); } public long trace_id; // required public String name; // required public long id; // required public long parent_id; // optional public List<Annotation> annotations; // required public List<BinaryAnnotation> binary_annotations; // required public boolean debug; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { TRACE_ID((short)1, "trace_id"), NAME((short)3, "name"), ID((short)4, "id"), PARENT_ID((short)5, "parent_id"), ANNOTATIONS((short)6, "annotations"), BINARY_ANNOTATIONS((short)8, "binary_annotations"), DEBUG((short)9, "debug"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // TRACE_ID return TRACE_ID; case 3: // NAME return NAME; case 4: // ID return ID; case 5: // PARENT_ID return PARENT_ID; case 6: // ANNOTATIONS return ANNOTATIONS; case 8: // BINARY_ANNOTATIONS return BINARY_ANNOTATIONS; case 9: // DEBUG return DEBUG; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __TRACE_ID_ISSET_ID = 0; private static final int __ID_ISSET_ID = 1; private static final int __PARENT_ID_ISSET_ID = 2; private static final int __DEBUG_ISSET_ID = 3; private byte __isset_bitfield = 0; private _Fields optionals[] = {_Fields.PARENT_ID,_Fields.DEBUG}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.TRACE_ID, new org.apache.thrift.meta_data.FieldMetaData("trace_id", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", 
org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.PARENT_ID, new org.apache.thrift.meta_data.FieldMetaData("parent_id", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.ANNOTATIONS, new org.apache.thrift.meta_data.FieldMetaData("annotations", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Annotation.class)))); tmpMap.put(_Fields.BINARY_ANNOTATIONS, new org.apache.thrift.meta_data.FieldMetaData("binary_annotations", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, BinaryAnnotation.class)))); tmpMap.put(_Fields.DEBUG, new org.apache.thrift.meta_data.FieldMetaData("debug", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Span.class, metaDataMap); } public Span() { this.debug = false; } public Span( long trace_id, String name, long id, List<Annotation> annotations, List<BinaryAnnotation> binary_annotations) { this(); this.trace_id = trace_id; setTrace_idIsSet(true); this.name = name; this.id = id; setIdIsSet(true); this.annotations = annotations; this.binary_annotations = binary_annotations; } 
/** * Performs a deep copy on <i>other</i>. */ public Span(Span other) { __isset_bitfield = other.__isset_bitfield; this.trace_id = other.trace_id; if (other.isSetName()) { this.name = other.name; } this.id = other.id; this.parent_id = other.parent_id; if (other.isSetAnnotations()) { List<Annotation> __this__annotations = new ArrayList<Annotation>(); for (Annotation other_element : other.annotations) { __this__annotations.add(new Annotation(other_element)); } this.annotations = __this__annotations; } if (other.isSetBinary_annotations()) { List<BinaryAnnotation> __this__binary_annotations = new ArrayList<BinaryAnnotation>(); for (BinaryAnnotation other_element : other.binary_annotations) { __this__binary_annotations.add(new BinaryAnnotation(other_element)); } this.binary_annotations = __this__binary_annotations; } this.debug = other.debug; } public Span deepCopy() { return new Span(this); } @Override public void clear() { setTrace_idIsSet(false); this.trace_id = 0; this.name = null; setIdIsSet(false); this.id = 0; setParent_idIsSet(false); this.parent_id = 0; this.annotations = null; this.binary_annotations = null; this.debug = false; } public long getTrace_id() { return this.trace_id; } public Span setTrace_id(long trace_id) { this.trace_id = trace_id; setTrace_idIsSet(true); return this; } public void unsetTrace_id() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __TRACE_ID_ISSET_ID); } /** Returns true if field trace_id is set (has been assigned a value) and false otherwise */ public boolean isSetTrace_id() { return EncodingUtils.testBit(__isset_bitfield, __TRACE_ID_ISSET_ID); } public void setTrace_idIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __TRACE_ID_ISSET_ID, value); } public String getName() { return this.name; } public Span setName(String name) { this.name = name; return this; } public void unsetName() { this.name = null; } /** Returns true if field name is set (has been assigned a value) and false 
otherwise */ public boolean isSetName() { return this.name != null; } public void setNameIsSet(boolean value) { if (!value) { this.name = null; } } public long getId() { return this.id; } public Span setId(long id) { this.id = id; setIdIsSet(true); return this; } public void unsetId() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID); } /** Returns true if field id is set (has been assigned a value) and false otherwise */ public boolean isSetId() { return EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID); } public void setIdIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value); } public long getParent_id() { return this.parent_id; } public Span setParent_id(long parent_id) { this.parent_id = parent_id; setParent_idIsSet(true); return this; } public void unsetParent_id() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __PARENT_ID_ISSET_ID); } /** Returns true if field parent_id is set (has been assigned a value) and false otherwise */ public boolean isSetParent_id() { return EncodingUtils.testBit(__isset_bitfield, __PARENT_ID_ISSET_ID); } public void setParent_idIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __PARENT_ID_ISSET_ID, value); } public int getAnnotationsSize() { return (this.annotations == null) ? 0 : this.annotations.size(); } public java.util.Iterator<Annotation> getAnnotationsIterator() { return (this.annotations == null) ? 
null : this.annotations.iterator(); } public void addToAnnotations(Annotation elem) { if (this.annotations == null) { this.annotations = new ArrayList<Annotation>(); } this.annotations.add(elem); } public List<Annotation> getAnnotations() { return this.annotations; } public Span setAnnotations(List<Annotation> annotations) { this.annotations = annotations; return this; } public void unsetAnnotations() { this.annotations = null; } /** Returns true if field annotations is set (has been assigned a value) and false otherwise */ public boolean isSetAnnotations() { return this.annotations != null; } public void setAnnotationsIsSet(boolean value) { if (!value) { this.annotations = null; } } public int getBinary_annotationsSize() { return (this.binary_annotations == null) ? 0 : this.binary_annotations.size(); } public java.util.Iterator<BinaryAnnotation> getBinary_annotationsIterator() { return (this.binary_annotations == null) ? null : this.binary_annotations.iterator(); } public void addToBinary_annotations(BinaryAnnotation elem) { if (this.binary_annotations == null) { this.binary_annotations = new ArrayList<BinaryAnnotation>(); } this.binary_annotations.add(elem); } public List<BinaryAnnotation> getBinary_annotations() { return this.binary_annotations; } public Span setBinary_annotations(List<BinaryAnnotation> binary_annotations) { this.binary_annotations = binary_annotations; return this; } public void unsetBinary_annotations() { this.binary_annotations = null; } /** Returns true if field binary_annotations is set (has been assigned a value) and false otherwise */ public boolean isSetBinary_annotations() { return this.binary_annotations != null; } public void setBinary_annotationsIsSet(boolean value) { if (!value) { this.binary_annotations = null; } } public boolean isDebug() { return this.debug; } public Span setDebug(boolean debug) { this.debug = debug; setDebugIsSet(true); return this; } public void unsetDebug() { __isset_bitfield = 
EncodingUtils.clearBit(__isset_bitfield, __DEBUG_ISSET_ID); } /** Returns true if field debug is set (has been assigned a value) and false otherwise */ public boolean isSetDebug() { return EncodingUtils.testBit(__isset_bitfield, __DEBUG_ISSET_ID); } public void setDebugIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DEBUG_ISSET_ID, value); } public void setFieldValue(_Fields field, Object value) { switch (field) { case TRACE_ID: if (value == null) { unsetTrace_id(); } else { setTrace_id((Long)value); } break; case NAME: if (value == null) { unsetName(); } else { setName((String)value); } break; case ID: if (value == null) { unsetId(); } else { setId((Long)value); } break; case PARENT_ID: if (value == null) { unsetParent_id(); } else { setParent_id((Long)value); } break; case ANNOTATIONS: if (value == null) { unsetAnnotations(); } else { setAnnotations((List<Annotation>)value); } break; case BINARY_ANNOTATIONS: if (value == null) { unsetBinary_annotations(); } else { setBinary_annotations((List<BinaryAnnotation>)value); } break; case DEBUG: if (value == null) { unsetDebug(); } else { setDebug((Boolean)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case TRACE_ID: return Long.valueOf(getTrace_id()); case NAME: return getName(); case ID: return Long.valueOf(getId()); case PARENT_ID: return Long.valueOf(getParent_id()); case ANNOTATIONS: return getAnnotations(); case BINARY_ANNOTATIONS: return getBinary_annotations(); case DEBUG: return Boolean.valueOf(isDebug()); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case TRACE_ID: return isSetTrace_id(); case NAME: return isSetName(); case ID: return isSetId(); case PARENT_ID: return isSetParent_id(); case ANNOTATIONS: return 
isSetAnnotations(); case BINARY_ANNOTATIONS: return isSetBinary_annotations(); case DEBUG: return isSetDebug(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof Span) return this.equals((Span)that); return false; } public boolean equals(Span that) { if (that == null) return false; boolean this_present_trace_id = true; boolean that_present_trace_id = true; if (this_present_trace_id || that_present_trace_id) { if (!(this_present_trace_id && that_present_trace_id)) return false; if (this.trace_id != that.trace_id) return false; } boolean this_present_name = true && this.isSetName(); boolean that_present_name = true && that.isSetName(); if (this_present_name || that_present_name) { if (!(this_present_name && that_present_name)) return false; if (!this.name.equals(that.name)) return false; } boolean this_present_id = true; boolean that_present_id = true; if (this_present_id || that_present_id) { if (!(this_present_id && that_present_id)) return false; if (this.id != that.id) return false; } boolean this_present_parent_id = true && this.isSetParent_id(); boolean that_present_parent_id = true && that.isSetParent_id(); if (this_present_parent_id || that_present_parent_id) { if (!(this_present_parent_id && that_present_parent_id)) return false; if (this.parent_id != that.parent_id) return false; } boolean this_present_annotations = true && this.isSetAnnotations(); boolean that_present_annotations = true && that.isSetAnnotations(); if (this_present_annotations || that_present_annotations) { if (!(this_present_annotations && that_present_annotations)) return false; if (!this.annotations.equals(that.annotations)) return false; } boolean this_present_binary_annotations = true && this.isSetBinary_annotations(); boolean that_present_binary_annotations = true && that.isSetBinary_annotations(); if (this_present_binary_annotations || that_present_binary_annotations) { if 
(!(this_present_binary_annotations && that_present_binary_annotations)) return false; if (!this.binary_annotations.equals(that.binary_annotations)) return false; } boolean this_present_debug = true && this.isSetDebug(); boolean that_present_debug = true && that.isSetDebug(); if (this_present_debug || that_present_debug) { if (!(this_present_debug && that_present_debug)) return false; if (this.debug != that.debug) return false; } return true; } @Override public int hashCode() { return 0; } public int compareTo(Span other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; Span typedOther = (Span)other; lastComparison = Boolean.valueOf(isSetTrace_id()).compareTo(typedOther.isSetTrace_id()); if (lastComparison != 0) { return lastComparison; } if (isSetTrace_id()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.trace_id, typedOther.trace_id); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetName()).compareTo(typedOther.isSetName()); if (lastComparison != 0) { return lastComparison; } if (isSetName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, typedOther.name); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetId()).compareTo(typedOther.isSetId()); if (lastComparison != 0) { return lastComparison; } if (isSetId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, typedOther.id); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetParent_id()).compareTo(typedOther.isSetParent_id()); if (lastComparison != 0) { return lastComparison; } if (isSetParent_id()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.parent_id, typedOther.parent_id); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetAnnotations()).compareTo(typedOther.isSetAnnotations()); if 
(lastComparison != 0) { return lastComparison; } if (isSetAnnotations()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.annotations, typedOther.annotations); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetBinary_annotations()).compareTo(typedOther.isSetBinary_annotations()); if (lastComparison != 0) { return lastComparison; } if (isSetBinary_annotations()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.binary_annotations, typedOther.binary_annotations); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetDebug()).compareTo(typedOther.isSetDebug()); if (lastComparison != 0) { return lastComparison; } if (isSetDebug()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.debug, typedOther.debug); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("Span("); boolean first = true; sb.append("trace_id:"); sb.append(this.trace_id); first = false; if (!first) sb.append(", "); sb.append("name:"); if (this.name == null) { sb.append("null"); } else { sb.append(this.name); } first = false; if (!first) sb.append(", "); sb.append("id:"); sb.append(this.id); first = false; if (isSetParent_id()) { if (!first) sb.append(", "); sb.append("parent_id:"); sb.append(this.parent_id); first = false; } if (!first) sb.append(", "); sb.append("annotations:"); if (this.annotations == null) { sb.append("null"); } else { sb.append(this.annotations); } first = false; if 
(!first) sb.append(", "); sb.append("binary_annotations:"); if (this.binary_annotations == null) { sb.append("null"); } else { sb.append(this.binary_annotations); } first = false; if (isSetDebug()) { if (!first) sb.append(", "); sb.append("debug:"); sb.append(this.debug); first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class SpanStandardSchemeFactory implements SchemeFactory { public SpanStandardScheme getScheme() { return new SpanStandardScheme(); } } private static class SpanStandardScheme extends StandardScheme<Span> { public void read(org.apache.thrift.protocol.TProtocol iprot, Span struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // TRACE_ID if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.trace_id = iprot.readI64(); struct.setTrace_idIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; 
case 3: // NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.name = iprot.readString(); struct.setNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // ID if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.id = iprot.readI64(); struct.setIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // PARENT_ID if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.parent_id = iprot.readI64(); struct.setParent_idIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // ANNOTATIONS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list0 = iprot.readListBegin(); struct.annotations = new ArrayList<Annotation>(_list0.size); for (int _i1 = 0; _i1 < _list0.size; ++_i1) { Annotation _elem2; // required _elem2 = new Annotation(); _elem2.read(iprot); struct.annotations.add(_elem2); } iprot.readListEnd(); } struct.setAnnotationsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 8: // BINARY_ANNOTATIONS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list3 = iprot.readListBegin(); struct.binary_annotations = new ArrayList<BinaryAnnotation>(_list3.size); for (int _i4 = 0; _i4 < _list3.size; ++_i4) { BinaryAnnotation _elem5; // required _elem5 = new BinaryAnnotation(); _elem5.read(iprot); struct.binary_annotations.add(_elem5); } iprot.readListEnd(); } struct.setBinary_annotationsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 9: // DEBUG if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { struct.debug = iprot.readBool(); struct.setDebugIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, Span struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); oprot.writeFieldBegin(TRACE_ID_FIELD_DESC); oprot.writeI64(struct.trace_id); oprot.writeFieldEnd(); if (struct.name != null) { oprot.writeFieldBegin(NAME_FIELD_DESC); oprot.writeString(struct.name); oprot.writeFieldEnd(); } oprot.writeFieldBegin(ID_FIELD_DESC); oprot.writeI64(struct.id); oprot.writeFieldEnd(); if (struct.isSetParent_id()) { oprot.writeFieldBegin(PARENT_ID_FIELD_DESC); oprot.writeI64(struct.parent_id); oprot.writeFieldEnd(); } if (struct.annotations != null) { oprot.writeFieldBegin(ANNOTATIONS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.annotations.size())); for (Annotation _iter6 : struct.annotations) { _iter6.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.binary_annotations != null) { oprot.writeFieldBegin(BINARY_ANNOTATIONS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.binary_annotations.size())); for (BinaryAnnotation _iter7 : struct.binary_annotations) { _iter7.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.isSetDebug()) { oprot.writeFieldBegin(DEBUG_FIELD_DESC); oprot.writeBool(struct.debug); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class SpanTupleSchemeFactory implements SchemeFactory { public SpanTupleScheme getScheme() { return new SpanTupleScheme(); } } private static class SpanTupleScheme extends TupleScheme<Span> { @Override public void 
write(org.apache.thrift.protocol.TProtocol prot, Span struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetTrace_id()) { optionals.set(0); } if (struct.isSetName()) { optionals.set(1); } if (struct.isSetId()) { optionals.set(2); } if (struct.isSetParent_id()) { optionals.set(3); } if (struct.isSetAnnotations()) { optionals.set(4); } if (struct.isSetBinary_annotations()) { optionals.set(5); } if (struct.isSetDebug()) { optionals.set(6); } oprot.writeBitSet(optionals, 7); if (struct.isSetTrace_id()) { oprot.writeI64(struct.trace_id); } if (struct.isSetName()) { oprot.writeString(struct.name); } if (struct.isSetId()) { oprot.writeI64(struct.id); } if (struct.isSetParent_id()) { oprot.writeI64(struct.parent_id); } if (struct.isSetAnnotations()) { { oprot.writeI32(struct.annotations.size()); for (Annotation _iter8 : struct.annotations) { _iter8.write(oprot); } } } if (struct.isSetBinary_annotations()) { { oprot.writeI32(struct.binary_annotations.size()); for (BinaryAnnotation _iter9 : struct.binary_annotations) { _iter9.write(oprot); } } } if (struct.isSetDebug()) { oprot.writeBool(struct.debug); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, Span struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(7); if (incoming.get(0)) { struct.trace_id = iprot.readI64(); struct.setTrace_idIsSet(true); } if (incoming.get(1)) { struct.name = iprot.readString(); struct.setNameIsSet(true); } if (incoming.get(2)) { struct.id = iprot.readI64(); struct.setIdIsSet(true); } if (incoming.get(3)) { struct.parent_id = iprot.readI64(); struct.setParent_idIsSet(true); } if (incoming.get(4)) { { org.apache.thrift.protocol.TList _list10 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.annotations = new ArrayList<Annotation>(_list10.size); for (int _i11 = 
0; _i11 < _list10.size; ++_i11) { Annotation _elem12; // required _elem12 = new Annotation(); _elem12.read(iprot); struct.annotations.add(_elem12); } } struct.setAnnotationsIsSet(true); } if (incoming.get(5)) { { org.apache.thrift.protocol.TList _list13 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.binary_annotations = new ArrayList<BinaryAnnotation>(_list13.size); for (int _i14 = 0; _i14 < _list13.size; ++_i14) { BinaryAnnotation _elem15; // required _elem15 = new BinaryAnnotation(); _elem15.read(iprot); struct.binary_annotations.add(_elem15); } } struct.setBinary_annotationsIsSet(true); } if (incoming.get(6)) { struct.debug = iprot.readBool(); struct.setDebugIsSet(true); } } } }
apache/incubator-atlas
35,415
typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeSystem.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.atlas.typesystem.types; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import org.apache.atlas.AtlasException; import org.apache.atlas.classification.InterfaceAudience; import org.apache.atlas.typesystem.TypesDef; import org.apache.atlas.typesystem.exception.TypeExistsException; import org.apache.atlas.typesystem.exception.TypeNotFoundException; import org.apache.atlas.typesystem.types.cache.DefaultTypeCache; import org.apache.atlas.typesystem.types.cache.TypeCache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Singleton; import java.lang.reflect.Constructor; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.concurrent.ConcurrentHashMap; @Singleton @InterfaceAudience.Private @Deprecated public class TypeSystem { private static final Logger LOG = LoggerFactory.getLogger(TypeSystem.class); private static final TypeSystem INSTANCE = new TypeSystem(); private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() { @Override 
public SimpleDateFormat initialValue() { SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); return dateFormat; } }; private TypeCache typeCache = new DefaultTypeCache(); private IdType idType; private Map<String, IDataType> coreTypes; public TypeSystem() { initialize(); } public static TypeSystem getInstance() { return INSTANCE; } /** * This is only used for testing purposes. Not intended for public use. */ @InterfaceAudience.Private public TypeSystem reset() { typeCache.clear(); // clear all entries in cache initialize(); return this; } public void setTypeCache(TypeCache typeCache) { this.typeCache = typeCache; } private void initialize() { coreTypes = new ConcurrentHashMap<>(); registerPrimitiveTypes(); registerCoreTypes(); } public ImmutableList<String> getCoreTypes() { return ImmutableList.copyOf(coreTypes.keySet()); } public ImmutableList<String> getTypeNames() throws AtlasException { List<String> typeNames = new ArrayList<>(typeCache.getAllTypeNames()); return ImmutableList.copyOf(typeNames); } public ImmutableList<String> getTypeNamesByCategory(final DataTypes.TypeCategory typeCategory) throws AtlasException { return getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{ put(TypeCache.TYPE_FILTER.CATEGORY, typeCategory.name()); }}); } public ImmutableList<String> getTypeNames(Map<TypeCache.TYPE_FILTER, String> filterMap) throws AtlasException { return ImmutableList.copyOf(typeCache.getTypeNames(filterMap)); } private void registerPrimitiveTypes() { coreTypes.put(DataTypes.BOOLEAN_TYPE.getName(), DataTypes.BOOLEAN_TYPE); coreTypes.put(DataTypes.BYTE_TYPE.getName(), DataTypes.BYTE_TYPE); coreTypes.put(DataTypes.SHORT_TYPE.getName(), DataTypes.SHORT_TYPE); coreTypes.put(DataTypes.INT_TYPE.getName(), DataTypes.INT_TYPE); coreTypes.put(DataTypes.LONG_TYPE.getName(), DataTypes.LONG_TYPE); coreTypes.put(DataTypes.FLOAT_TYPE.getName(), DataTypes.FLOAT_TYPE); 
coreTypes.put(DataTypes.DOUBLE_TYPE.getName(), DataTypes.DOUBLE_TYPE); coreTypes.put(DataTypes.BIGINTEGER_TYPE.getName(), DataTypes.BIGINTEGER_TYPE); coreTypes.put(DataTypes.BIGDECIMAL_TYPE.getName(), DataTypes.BIGDECIMAL_TYPE); coreTypes.put(DataTypes.DATE_TYPE.getName(), DataTypes.DATE_TYPE); coreTypes.put(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE); } /* * The only core OOB type we will define is the Struct to represent the Identity of an Instance. */ private void registerCoreTypes() { idType = new IdType(); coreTypes.put(idType.getStructType().getName(), idType.getStructType()); } public IdType getIdType() { return idType; } public boolean isRegistered(String typeName) throws AtlasException { return isCoreType(typeName) || typeCache.has(typeName); } protected boolean isCoreType(String typeName) { return coreTypes.containsKey(typeName); } public IDataType getDataType(String name) throws AtlasException { if (isCoreType(name)) { return coreTypes.get(name); } if (typeCache.has(name)) { return typeCache.get(name); } /* * is this an Array Type? */ String arrElemType = TypeUtils.parseAsArrayType(name); if (arrElemType != null) { IDataType dT = defineArrayType(getDataType(arrElemType)); return dT; } /* * is this a Map Type? */ String[] mapType = TypeUtils.parseAsMapType(name); if (mapType != null) { IDataType dT = defineMapType(getDataType(mapType[0]), getDataType(mapType[1])); return dT; } /* * Invoke cache callback to possibly obtain type from other storage. */ IDataType dT = typeCache.onTypeFault(name); if (dT != null) { return dT; } throw new TypeNotFoundException(String.format("Unknown datatype: %s", name)); } public <T extends IDataType> T getDataType(Class<T> cls, String name) throws AtlasException { try { IDataType dt = getDataType(name); return cls.cast(dt); } catch (ClassCastException cce) { throw new AtlasException(cce); } } public StructType defineStructType(String name, boolean errorIfExists, AttributeDefinition... 
attrDefs) throws AtlasException { return defineStructType(name, null, errorIfExists, attrDefs); } public StructType defineStructType(String name, String description, boolean errorIfExists, AttributeDefinition... attrDefs) throws AtlasException { StructTypeDefinition structDef = new StructTypeDefinition(name, description, attrDefs); defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.of(structDef), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of()); return getDataType(StructType.class, structDef.typeName); } /** * construct a temporary StructType for a Query Result. This is not registered in the * typeSystem. * The attributes in the typeDefinition can only reference permanent types. * @param name struct type name * @param attrDefs struct type definition * @return temporary struct type * @throws AtlasException */ public StructType defineQueryResultType(String name, Map<String, IDataType> tempTypes, AttributeDefinition... 
attrDefs) throws AtlasException { AttributeInfo[] infos = new AttributeInfo[attrDefs.length]; for (int i = 0; i < attrDefs.length; i++) { infos[i] = new AttributeInfo(this, attrDefs[i], tempTypes); } return new StructType(this, name, null, infos); } public TraitType defineTraitType(HierarchicalTypeDefinition<TraitType> traitDef) throws AtlasException { defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), ImmutableList.of(traitDef), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of()); return getDataType(TraitType.class, traitDef.typeName); } public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef) throws AtlasException { defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.of(classDef)); return getDataType(ClassType.class, classDef.typeName); } public Map<String, IDataType> defineTraitTypes(HierarchicalTypeDefinition<TraitType>... traitDefs) throws AtlasException { TransientTypeSystem transientTypes = new TransientTypeSystem(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), ImmutableList.copyOf(traitDefs), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of()); return transientTypes.defineTypes(false); } public Map<String, IDataType> defineClassTypes(HierarchicalTypeDefinition<ClassType>... 
classDefs) throws AtlasException { TransientTypeSystem transientTypes = new TransientTypeSystem(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.copyOf(classDefs)); return transientTypes.defineTypes(false); } public Map<String, IDataType> updateTypes(TypesDef typesDef) throws AtlasException { ImmutableList<EnumTypeDefinition> enumDefs = ImmutableList.copyOf(typesDef.enumTypesAsJavaList()); ImmutableList<StructTypeDefinition> structDefs = ImmutableList.copyOf(typesDef.structTypesAsJavaList()); ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs = ImmutableList.copyOf(typesDef.traitTypesAsJavaList()); ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs = ImmutableList.copyOf(typesDef.classTypesAsJavaList()); TransientTypeSystem transientTypes = new TransientTypeSystem(enumDefs, structDefs, traitDefs, classDefs); return transientTypes.defineTypes(true); } public Map<String, IDataType> defineTypes(TypesDef typesDef) throws AtlasException { ImmutableList<EnumTypeDefinition> enumDefs = ImmutableList.copyOf(typesDef.enumTypesAsJavaList()); ImmutableList<StructTypeDefinition> structDefs = ImmutableList.copyOf(typesDef.structTypesAsJavaList()); ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs = ImmutableList.copyOf(typesDef.traitTypesAsJavaList()); ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs = ImmutableList.copyOf(typesDef.classTypesAsJavaList()); return defineTypes(enumDefs, structDefs, traitDefs, classDefs); } public Map<String, IDataType> defineTypes(ImmutableList<EnumTypeDefinition> enumDefs, ImmutableList<StructTypeDefinition> structDefs, ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs, ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) throws AtlasException { TransientTypeSystem transientTypes = new TransientTypeSystem(enumDefs, structDefs, traitDefs, classDefs); return 
transientTypes.defineTypes(false); } public DataTypes.ArrayType defineArrayType(IDataType elemType) throws AtlasException { assert elemType != null; DataTypes.ArrayType dT = new DataTypes.ArrayType(elemType); return dT; } public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws AtlasException { assert keyType != null; assert valueType != null; DataTypes.MapType dT = new DataTypes.MapType(keyType, valueType); return dT; } public EnumType defineEnumType(String name, EnumValue... values) throws AtlasException { return defineEnumType(new EnumTypeDefinition(name, values)); } public EnumType defineEnumType(String name, String description, EnumValue... values) throws AtlasException { return defineEnumType(new EnumTypeDefinition(name, description, values)); } public EnumType defineEnumType(EnumTypeDefinition eDef) throws AtlasException { assert eDef.name != null; if (isRegistered(eDef.name)) { throw new AtlasException(String.format("Redefinition of type %s not supported", eDef.name)); } EnumType eT = new EnumType(this, eDef.name, eDef.description, eDef.version, eDef.enumValues); typeCache.put(eT); return eT; } public SimpleDateFormat getDateFormat() { return dateFormat.get(); } public boolean allowNullsInCollections() { return false; } /** * Create an instance of {@link TransientTypeSystem} with the types defined in the {@link TypesDef}. * * As part of this, a set of verifications are run on the types defined. * @param typesDef The new list of types to be created or updated. * @param isUpdate True, if types are updated, false otherwise. * @return {@link TransientTypeSystem} that holds the newly added types. 
* @throws AtlasException */ public TransientTypeSystem createTransientTypeSystem(TypesDef typesDef, boolean isUpdate) throws AtlasException { ImmutableList<EnumTypeDefinition> enumDefs = ImmutableList.copyOf(typesDef.enumTypesAsJavaList()); ImmutableList<StructTypeDefinition> structDefs = ImmutableList.copyOf(typesDef.structTypesAsJavaList()); ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs = ImmutableList.copyOf(typesDef.traitTypesAsJavaList()); ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs = ImmutableList.copyOf(typesDef.classTypesAsJavaList()); TransientTypeSystem transientTypeSystem = new TransientTypeSystem(enumDefs, structDefs, traitDefs, classDefs); transientTypeSystem.verifyTypes(isUpdate); return transientTypeSystem; } /** * Commit the given types to this {@link TypeSystem} instance. * * This step should be called only after the types have been committed to the backend stores successfully. * @param typesAdded newly added types. * @throws AtlasException */ public void commitTypes(Map<String, IDataType> typesAdded) throws AtlasException { for (Map.Entry<String, IDataType> typeEntry : typesAdded.entrySet()) { IDataType type = typeEntry.getValue(); //Add/replace the new type in the typesystem typeCache.put(type); } } public class TransientTypeSystem extends TypeSystem { final ImmutableList<StructTypeDefinition> structDefs; final ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs; final ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs; private final ImmutableList<EnumTypeDefinition> enumDefs; Map<String, StructTypeDefinition> structNameToDefMap = new HashMap<>(); Map<String, HierarchicalTypeDefinition<TraitType>> traitNameToDefMap = new HashMap<>(); Map<String, HierarchicalTypeDefinition<ClassType>> classNameToDefMap = new HashMap<>(); Map<String, IDataType> transientTypes = null; List<AttributeInfo> recursiveRefs = new ArrayList<>(); List<DataTypes.ArrayType> recursiveArrayTypes = new ArrayList<>(); 
List<DataTypes.MapType> recursiveMapTypes = new ArrayList<>(); TransientTypeSystem(ImmutableList<EnumTypeDefinition> enumDefs, ImmutableList<StructTypeDefinition> structDefs, ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs, ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) { this.enumDefs = enumDefs; this.structDefs = structDefs; this.traitDefs = traitDefs; this.classDefs = classDefs; transientTypes = new HashMap<>(); } private IDataType dataType(String name) throws AtlasException { if (transientTypes.containsKey(name)) { return transientTypes.get(name); } return TypeSystem.this.getDataType(IDataType.class, name); } /* * Step 1: * - validate cannot redefine types * - setup shallow Type instances to facilitate recursive type graphs */ private void validateAndSetupShallowTypes(boolean update) throws AtlasException { for (EnumTypeDefinition eDef : enumDefs) { assert eDef.name != null; if (!update) { if (TypeSystem.this.isRegistered(eDef.name)) { throw new TypeExistsException(String.format("Redefinition of type %s is not supported", eDef.name)); } else if (transientTypes.containsKey(eDef.name)) { LOG.warn("Found duplicate definition of type {}. Ignoring..", eDef.name); continue; } } EnumType eT = new EnumType(this, eDef.name, eDef.description, eDef.version, eDef.enumValues); transientTypes.put(eDef.name, eT); } for (StructTypeDefinition sDef : structDefs) { assert sDef.typeName != null; if (!update) { if (TypeSystem.this.isRegistered(sDef.typeName)) { throw new TypeExistsException(String.format("Redefinition of type %s is not supported", sDef.typeName)); } else if (transientTypes.containsKey(sDef.typeName)) { LOG.warn("Found duplicate definition of type {}. 
Ignoring..", sDef.typeName); continue; } } StructType sT = new StructType(this, sDef.typeName, sDef.typeDescription, sDef.typeVersion, sDef.attributeDefinitions.length); structNameToDefMap.put(sDef.typeName, sDef); transientTypes.put(sDef.typeName, sT); } for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) { assert traitDef.typeName != null; if (!update) { if (TypeSystem.this.isRegistered(traitDef.typeName)) { throw new TypeExistsException(String.format("Redefinition of type %s is not supported", traitDef.typeName)); } else if (transientTypes.containsKey(traitDef.typeName)) { LOG.warn("Found duplicate definition of type {}. Ignoring..", traitDef.typeName); continue; } } TraitType tT = new TraitType(this, traitDef.typeName, traitDef.typeDescription, traitDef.typeVersion, traitDef.superTypes, traitDef.attributeDefinitions.length); traitNameToDefMap.put(traitDef.typeName, traitDef); transientTypes.put(traitDef.typeName, tT); } for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) { assert classDef.typeName != null; if (!update) { if (TypeSystem.this.isRegistered(classDef.typeName)) { throw new TypeExistsException(String.format("Redefinition of type %s is not supported", classDef.typeName)); } else if (transientTypes.containsKey(classDef.typeName)) { LOG.warn("Found duplicate definition of type {}. 
Ignoring..", classDef.typeName); continue; } } ClassType cT = new ClassType(this, classDef.typeName, classDef.typeDescription, classDef.typeVersion, classDef.superTypes, classDef.attributeDefinitions.length); classNameToDefMap.put(classDef.typeName, classDef); transientTypes.put(classDef.typeName, cT); } } @Override public boolean isRegistered(String typeName) throws AtlasException { return transientTypes.containsKey(typeName) || TypeSystem.this.isRegistered(typeName); } private <U extends HierarchicalType> void validateSuperTypes(Class<U> cls, HierarchicalTypeDefinition<U> def) throws AtlasException { for (String superTypeName : def.superTypes) { IDataType dT = dataType(superTypeName); if (dT == null) { throw new AtlasException( String.format("Unknown superType %s in definition of type %s", superTypeName, def.typeName)); } if (!cls.isAssignableFrom(dT.getClass())) { throw new AtlasException( String.format("SuperType %s must be a %s, in definition of type %s", superTypeName, cls.getName(), def.typeName)); } } } /* * Step 2: * - for Hierarchical Types, validate SuperTypes. 
* - for each Hierarchical Type setup their SuperTypes Graph */ private void validateAndSetupSuperTypes() throws AtlasException { for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) { validateSuperTypes(TraitType.class, traitDef); TraitType traitType = getDataType(TraitType.class, traitDef.typeName); traitType.setupSuperTypesGraph(); } for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) { validateSuperTypes(ClassType.class, classDef); ClassType classType = getDataType(ClassType.class, classDef.typeName); classType.setupSuperTypesGraph(); } } private AttributeInfo constructAttributeInfo(AttributeDefinition attrDef) throws AtlasException { AttributeInfo info = new AttributeInfo(this, attrDef, null); if (transientTypes.keySet().contains(attrDef.dataTypeName)) { recursiveRefs.add(info); } if (info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) { DataTypes.ArrayType arrType = (DataTypes.ArrayType) info.dataType(); if (transientTypes.keySet().contains(arrType.getElemType().getName())) { recursiveArrayTypes.add(arrType); } } if (info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) { DataTypes.MapType mapType = (DataTypes.MapType) info.dataType(); if (transientTypes.keySet().contains(mapType.getKeyType().getName())) { recursiveMapTypes.add(mapType); } else if (transientTypes.keySet().contains(mapType.getValueType().getName())) { recursiveMapTypes.add(mapType); } } if (info.multiplicity.upper > 1 && !(info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP || info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY)) { throw new AtlasException( String.format("A multiplicty of more than one requires a collection type for attribute '%s'", info.name)); } return info; } private StructType constructStructureType(StructTypeDefinition def) throws AtlasException { AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length]; for (int i = 0; i < def.attributeDefinitions.length; i++) { infos[i] = 
constructAttributeInfo(def.attributeDefinitions[i]); } StructType type = new StructType(this, def.typeName, def.typeDescription, def.typeVersion, infos); transientTypes.put(def.typeName, type); return type; } private <U extends HierarchicalType> U constructHierarchicalType(Class<U> cls, HierarchicalTypeDefinition<U> def) throws AtlasException { AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length]; for (int i = 0; i < def.attributeDefinitions.length; i++) { infos[i] = constructAttributeInfo(def.attributeDefinitions[i]); } try { Constructor<U> cons = cls.getDeclaredConstructor(TypeSystem.class, String.class, String.class, String.class, ImmutableSet.class, AttributeInfo[].class); U type = cons.newInstance(this, def.typeName, def.typeDescription, def.typeVersion, def.superTypes, infos); transientTypes.put(def.typeName, type); return type; } catch (Exception e) { e.printStackTrace(); throw new AtlasException(String.format("Cannot construct Type of MetaType %s - %s", cls.getName(), def.typeName), e); } } /* * Step 3: * - Order Hierarchical Types in order of SuperType before SubType. 
* - Construct all the Types */ private void orderAndConstructTypes() throws AtlasException { List<TraitType> traitTypes = new ArrayList<>(); for (String traitTypeName : traitNameToDefMap.keySet()) { traitTypes.add(getDataType(TraitType.class, traitTypeName)); } traitTypes = HierarchicalTypeDependencySorter.sortTypes(traitTypes); List<ClassType> classTypes = new ArrayList<>(); for (String classTypeName : classNameToDefMap.keySet()) { classTypes.add(getDataType(ClassType.class, classTypeName)); } classTypes = HierarchicalTypeDependencySorter.sortTypes(classTypes); for (StructTypeDefinition structDef : structDefs) { constructStructureType(structDef); } for (TraitType traitType : traitTypes) { constructHierarchicalType(TraitType.class, traitNameToDefMap.get(traitType.getName())); } for (ClassType classType : classTypes) { constructHierarchicalType(ClassType.class, classNameToDefMap.get(classType.getName())); } } /* * Step 4: * - fix up references in recursive AttrInfo and recursive Collection Types. 
*/ private void setupRecursiveTypes() throws AtlasException { for (AttributeInfo info : recursiveRefs) { info.setDataType(dataType(info.dataType().getName())); } for (DataTypes.ArrayType arrType : recursiveArrayTypes) { arrType.setElemType(dataType(arrType.getElemType().getName())); } for (DataTypes.MapType mapType : recursiveMapTypes) { mapType.setKeyType(dataType(mapType.getKeyType().getName())); mapType.setValueType(dataType(mapType.getValueType().getName())); } } /** * Step 5: * - Validate that the update can be done */ private void validateUpdateIsPossible() throws TypeUpdateException, AtlasException { //If the type is modified, validate that update can be done for (IDataType newType : transientTypes.values()) { IDataType oldType = null; try { oldType = TypeSystem.this.getDataType(IDataType.class, newType.getName()); } catch (TypeNotFoundException e) { LOG.debug(String.format("No existing type %s found - update OK", newType.getName())); } if (oldType != null) { oldType.validateUpdate(newType); } } } Map<String, IDataType> defineTypes(boolean update) throws AtlasException { verifyTypes(update); Map<String, IDataType> typesAdded = getTypesAdded(); commitTypes(typesAdded); return typesAdded; } @Override public ImmutableList<String> getTypeNames() throws AtlasException { Set<String> typeNames = transientTypes.keySet(); typeNames.addAll(TypeSystem.this.getTypeNames()); return ImmutableList.copyOf(typeNames); } //get from transient types. Else, from main type system @Override public IDataType getDataType(String name) throws AtlasException { if (transientTypes != null) { if (transientTypes.containsKey(name)) { return transientTypes.get(name); } /* * is this an Array Type? */ String arrElemType = TypeUtils.parseAsArrayType(name); if (arrElemType != null) { IDataType dT = defineArrayType(getDataType(IDataType.class, arrElemType)); return dT; } /* * is this a Map Type? 
*/ String[] mapType = TypeUtils.parseAsMapType(name); if (mapType != null) { IDataType dT = defineMapType(getDataType(IDataType.class, mapType[0]), getDataType(IDataType.class, mapType[1])); return dT; } } return TypeSystem.this.getDataType(name); } @Override public StructType defineStructType(String name, boolean errorIfExists, AttributeDefinition... attrDefs) throws AtlasException { throw new AtlasException("Internal Error: define type called on TransientTypeSystem"); } @Override public TraitType defineTraitType(HierarchicalTypeDefinition traitDef) throws AtlasException { throw new AtlasException("Internal Error: define type called on TransientTypeSystem"); } @Override public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef) throws AtlasException { throw new AtlasException("Internal Error: define type called on TransientTypeSystem"); } @Override public Map<String, IDataType> defineTypes(ImmutableList<EnumTypeDefinition> enumDefs, ImmutableList<StructTypeDefinition> structDefs, ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs, ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) throws AtlasException { throw new AtlasException("Internal Error: define type called on TransientTypeSystem"); } @Override public DataTypes.ArrayType defineArrayType(IDataType elemType) throws AtlasException { return super.defineArrayType(elemType); } @Override public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws AtlasException { return super.defineMapType(keyType, valueType); } void verifyTypes(boolean isUpdate) throws AtlasException { validateAndSetupShallowTypes(isUpdate); validateAndSetupSuperTypes(); orderAndConstructTypes(); setupRecursiveTypes(); if (isUpdate) { validateUpdateIsPossible(); } } @Override public void commitTypes(Map<String, IDataType> typesAdded) throws AtlasException { TypeSystem.this.commitTypes(typesAdded); } public Map<String, IDataType> getTypesAdded() { return new 
HashMap<>(transientTypes); } /** * The core types do not change and they are registered * once in the main type system. */ @Override public ImmutableList<String> getCoreTypes() { return TypeSystem.this.getCoreTypes(); } } public class IdType { private static final String ID_ATTRNAME = "guid"; private static final String TYPENAME_ATTRNAME = "typeName"; private static final String STATE_ATTRNAME = "state"; private static final String VERSION_ATTRNAME = "version"; private static final String TYP_NAME = "__IdType"; private StructType type; private IdType() { AttributeDefinition idAttr = new AttributeDefinition(ID_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null); AttributeDefinition typNmAttr = new AttributeDefinition(TYPENAME_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null); AttributeDefinition stateAttr = new AttributeDefinition(STATE_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null); AttributeDefinition versionAttr = new AttributeDefinition(VERSION_ATTRNAME, DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null); try { AttributeInfo[] infos = new AttributeInfo[4]; infos[0] = new AttributeInfo(TypeSystem.this, idAttr, null); infos[1] = new AttributeInfo(TypeSystem.this, typNmAttr, null); infos[2] = new AttributeInfo(TypeSystem.this, stateAttr, null); infos[3] = new AttributeInfo(TypeSystem.this, versionAttr, null); type = new StructType(TypeSystem.this, TYP_NAME, null, infos); } catch (AtlasException me) { throw new RuntimeException(me); } } public StructType getStructType() { return type; } public String getName() { return TYP_NAME; } public String idAttrName() { return ID_ATTRNAME; } public String typeNameAttrName() { return TYPENAME_ATTRNAME; } public String stateAttrName() { return STATE_ATTRNAME; } public String versionAttrName() { return VERSION_ATTRNAME; } } public static final String ID_STRUCT_ID_ATTRNAME = IdType.ID_ATTRNAME; public static final String 
ID_STRUCT_TYP_NAME = IdType.TYP_NAME; }
googleapis/google-cloud-java
35,068
java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/ListRuleRevisionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/chronicle/v1/rule.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.chronicle.v1; /** * * * <pre> * Response message for ListRuleRevisions method. * </pre> * * Protobuf type {@code google.cloud.chronicle.v1.ListRuleRevisionsResponse} */ public final class ListRuleRevisionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.ListRuleRevisionsResponse) ListRuleRevisionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRuleRevisionsResponse.newBuilder() to construct. 
private ListRuleRevisionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRuleRevisionsResponse() { rules_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRuleRevisionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.chronicle.v1.ListRuleRevisionsResponse.class, com.google.cloud.chronicle.v1.ListRuleRevisionsResponse.Builder.class); } public static final int RULES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.chronicle.v1.Rule> rules_; /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.chronicle.v1.Rule> getRulesList() { return rules_; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.chronicle.v1.RuleOrBuilder> getRulesOrBuilderList() { return rules_; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ @java.lang.Override public int getRulesCount() { return rules_.size(); } /** * * * <pre> * The revisions of the rule. 
* </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ @java.lang.Override public com.google.cloud.chronicle.v1.Rule getRules(int index) { return rules_.get(index); } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ @java.lang.Override public com.google.cloud.chronicle.v1.RuleOrBuilder getRulesOrBuilder(int index) { return rules_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < rules_.size(); i++) { output.writeMessage(1, rules_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < rules_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, rules_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.chronicle.v1.ListRuleRevisionsResponse)) { return super.equals(obj); } com.google.cloud.chronicle.v1.ListRuleRevisionsResponse other = (com.google.cloud.chronicle.v1.ListRuleRevisionsResponse) obj; if (!getRulesList().equals(other.getRulesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRulesCount() > 0) { hash = (37 * hash) + RULES_FIELD_NUMBER; hash = (53 * hash) + getRulesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.chronicle.v1.ListRuleRevisionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListRuleRevisions method. * </pre> * * Protobuf type {@code google.cloud.chronicle.v1.ListRuleRevisionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.ListRuleRevisionsResponse) com.google.cloud.chronicle.v1.ListRuleRevisionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.chronicle.v1.ListRuleRevisionsResponse.class, com.google.cloud.chronicle.v1.ListRuleRevisionsResponse.Builder.class); } // Construct using com.google.cloud.chronicle.v1.ListRuleRevisionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (rulesBuilder_ == null) { rules_ = java.util.Collections.emptyList(); } else { rules_ = null; rulesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.chronicle.v1.RuleProto 
.internal_static_google_cloud_chronicle_v1_ListRuleRevisionsResponse_descriptor; } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsResponse getDefaultInstanceForType() { return com.google.cloud.chronicle.v1.ListRuleRevisionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsResponse build() { com.google.cloud.chronicle.v1.ListRuleRevisionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsResponse buildPartial() { com.google.cloud.chronicle.v1.ListRuleRevisionsResponse result = new com.google.cloud.chronicle.v1.ListRuleRevisionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.chronicle.v1.ListRuleRevisionsResponse result) { if (rulesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { rules_ = java.util.Collections.unmodifiableList(rules_); bitField0_ = (bitField0_ & ~0x00000001); } result.rules_ = rules_; } else { result.rules_ = rulesBuilder_.build(); } } private void buildPartial0(com.google.cloud.chronicle.v1.ListRuleRevisionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override 
public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.chronicle.v1.ListRuleRevisionsResponse) { return mergeFrom((com.google.cloud.chronicle.v1.ListRuleRevisionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.chronicle.v1.ListRuleRevisionsResponse other) { if (other == com.google.cloud.chronicle.v1.ListRuleRevisionsResponse.getDefaultInstance()) return this; if (rulesBuilder_ == null) { if (!other.rules_.isEmpty()) { if (rules_.isEmpty()) { rules_ = other.rules_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRulesIsMutable(); rules_.addAll(other.rules_); } onChanged(); } } else { if (!other.rules_.isEmpty()) { if (rulesBuilder_.isEmpty()) { rulesBuilder_.dispose(); rulesBuilder_ = null; rules_ = other.rules_; bitField0_ = (bitField0_ & ~0x00000001); rulesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRulesFieldBuilder() : null; } else { rulesBuilder_.addAllMessages(other.rules_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.chronicle.v1.Rule m = input.readMessage( com.google.cloud.chronicle.v1.Rule.parser(), extensionRegistry); if (rulesBuilder_ == null) { ensureRulesIsMutable(); rules_.add(m); } else { rulesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.chronicle.v1.Rule> rules_ = java.util.Collections.emptyList(); private void ensureRulesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { rules_ = new java.util.ArrayList<com.google.cloud.chronicle.v1.Rule>(rules_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.chronicle.v1.Rule, com.google.cloud.chronicle.v1.Rule.Builder, com.google.cloud.chronicle.v1.RuleOrBuilder> rulesBuilder_; /** * * * <pre> * The revisions of the rule. 
* </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public java.util.List<com.google.cloud.chronicle.v1.Rule> getRulesList() { if (rulesBuilder_ == null) { return java.util.Collections.unmodifiableList(rules_); } else { return rulesBuilder_.getMessageList(); } } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public int getRulesCount() { if (rulesBuilder_ == null) { return rules_.size(); } else { return rulesBuilder_.getCount(); } } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public com.google.cloud.chronicle.v1.Rule getRules(int index) { if (rulesBuilder_ == null) { return rules_.get(index); } else { return rulesBuilder_.getMessage(index); } } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder setRules(int index, com.google.cloud.chronicle.v1.Rule value) { if (rulesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRulesIsMutable(); rules_.set(index, value); onChanged(); } else { rulesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder setRules(int index, com.google.cloud.chronicle.v1.Rule.Builder builderForValue) { if (rulesBuilder_ == null) { ensureRulesIsMutable(); rules_.set(index, builderForValue.build()); onChanged(); } else { rulesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The revisions of the rule. 
* </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder addRules(com.google.cloud.chronicle.v1.Rule value) { if (rulesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRulesIsMutable(); rules_.add(value); onChanged(); } else { rulesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder addRules(int index, com.google.cloud.chronicle.v1.Rule value) { if (rulesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRulesIsMutable(); rules_.add(index, value); onChanged(); } else { rulesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder addRules(com.google.cloud.chronicle.v1.Rule.Builder builderForValue) { if (rulesBuilder_ == null) { ensureRulesIsMutable(); rules_.add(builderForValue.build()); onChanged(); } else { rulesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder addRules(int index, com.google.cloud.chronicle.v1.Rule.Builder builderForValue) { if (rulesBuilder_ == null) { ensureRulesIsMutable(); rules_.add(index, builderForValue.build()); onChanged(); } else { rulesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder addAllRules( java.lang.Iterable<? 
extends com.google.cloud.chronicle.v1.Rule> values) { if (rulesBuilder_ == null) { ensureRulesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, rules_); onChanged(); } else { rulesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder clearRules() { if (rulesBuilder_ == null) { rules_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { rulesBuilder_.clear(); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public Builder removeRules(int index) { if (rulesBuilder_ == null) { ensureRulesIsMutable(); rules_.remove(index); onChanged(); } else { rulesBuilder_.remove(index); } return this; } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public com.google.cloud.chronicle.v1.Rule.Builder getRulesBuilder(int index) { return getRulesFieldBuilder().getBuilder(index); } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public com.google.cloud.chronicle.v1.RuleOrBuilder getRulesOrBuilder(int index) { if (rulesBuilder_ == null) { return rules_.get(index); } else { return rulesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public java.util.List<? extends com.google.cloud.chronicle.v1.RuleOrBuilder> getRulesOrBuilderList() { if (rulesBuilder_ != null) { return rulesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(rules_); } } /** * * * <pre> * The revisions of the rule. 
* </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public com.google.cloud.chronicle.v1.Rule.Builder addRulesBuilder() { return getRulesFieldBuilder() .addBuilder(com.google.cloud.chronicle.v1.Rule.getDefaultInstance()); } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public com.google.cloud.chronicle.v1.Rule.Builder addRulesBuilder(int index) { return getRulesFieldBuilder() .addBuilder(index, com.google.cloud.chronicle.v1.Rule.getDefaultInstance()); } /** * * * <pre> * The revisions of the rule. * </pre> * * <code>repeated .google.cloud.chronicle.v1.Rule rules = 1;</code> */ public java.util.List<com.google.cloud.chronicle.v1.Rule.Builder> getRulesBuilderList() { return getRulesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.chronicle.v1.Rule, com.google.cloud.chronicle.v1.Rule.Builder, com.google.cloud.chronicle.v1.RuleOrBuilder> getRulesFieldBuilder() { if (rulesBuilder_ == null) { rulesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.chronicle.v1.Rule, com.google.cloud.chronicle.v1.Rule.Builder, com.google.cloud.chronicle.v1.RuleOrBuilder>( rules_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); rules_ = null; } return rulesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.ListRuleRevisionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.ListRuleRevisionsResponse) private static final com.google.cloud.chronicle.v1.ListRuleRevisionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.ListRuleRevisionsResponse(); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRuleRevisionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListRuleRevisionsResponse>() { @java.lang.Override public ListRuleRevisionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) 
{ throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRuleRevisionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRuleRevisionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,148
java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/UpdateDeploymentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/apihub/v1/apihub_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.apihub.v1; /** * * * <pre> * The [UpdateDeployment][google.cloud.apihub.v1.ApiHub.UpdateDeployment] * method's request. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.UpdateDeploymentRequest} */ public final class UpdateDeploymentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.UpdateDeploymentRequest) UpdateDeploymentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateDeploymentRequest.newBuilder() to construct. 
private UpdateDeploymentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateDeploymentRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateDeploymentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateDeploymentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateDeploymentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.UpdateDeploymentRequest.class, com.google.cloud.apihub.v1.UpdateDeploymentRequest.Builder.class); } private int bitField0_; public static final int DEPLOYMENT_FIELD_NUMBER = 1; private com.google.cloud.apihub.v1.Deployment deployment_; /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the deployment field is set. */ @java.lang.Override public boolean hasDeployment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The deployment. 
*/ @java.lang.Override public com.google.cloud.apihub.v1.Deployment getDeployment() { return deployment_ == null ? com.google.cloud.apihub.v1.Deployment.getDefaultInstance() : deployment_; } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.apihub.v1.DeploymentOrBuilder getDeploymentOrBuilder() { return deployment_ == null ? com.google.cloud.apihub.v1.Deployment.getDefaultInstance() : deployment_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getDeployment()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDeployment()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.apihub.v1.UpdateDeploymentRequest)) { return super.equals(obj); } com.google.cloud.apihub.v1.UpdateDeploymentRequest other = (com.google.cloud.apihub.v1.UpdateDeploymentRequest) obj; if (hasDeployment() != other.hasDeployment()) return false; if (hasDeployment()) { if (!getDeployment().equals(other.getDeployment())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if 
(hasDeployment()) { hash = (37 * hash) + DEPLOYMENT_FIELD_NUMBER; hash = (53 * hash) + getDeployment().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.apihub.v1.UpdateDeploymentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The [UpdateDeployment][google.cloud.apihub.v1.ApiHub.UpdateDeployment] * method's request. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.UpdateDeploymentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.UpdateDeploymentRequest) com.google.cloud.apihub.v1.UpdateDeploymentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateDeploymentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateDeploymentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.UpdateDeploymentRequest.class, com.google.cloud.apihub.v1.UpdateDeploymentRequest.Builder.class); } // Construct using com.google.cloud.apihub.v1.UpdateDeploymentRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDeploymentFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; deployment_ = null; if (deploymentBuilder_ != null) { deploymentBuilder_.dispose(); deploymentBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ 
!= null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateDeploymentRequest_descriptor; } @java.lang.Override public com.google.cloud.apihub.v1.UpdateDeploymentRequest getDefaultInstanceForType() { return com.google.cloud.apihub.v1.UpdateDeploymentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.apihub.v1.UpdateDeploymentRequest build() { com.google.cloud.apihub.v1.UpdateDeploymentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.apihub.v1.UpdateDeploymentRequest buildPartial() { com.google.cloud.apihub.v1.UpdateDeploymentRequest result = new com.google.cloud.apihub.v1.UpdateDeploymentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.apihub.v1.UpdateDeploymentRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.deployment_ = deploymentBuilder_ == null ? deployment_ : deploymentBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.apihub.v1.UpdateDeploymentRequest) { return mergeFrom((com.google.cloud.apihub.v1.UpdateDeploymentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.apihub.v1.UpdateDeploymentRequest other) { if (other == com.google.cloud.apihub.v1.UpdateDeploymentRequest.getDefaultInstance()) return this; if (other.hasDeployment()) { mergeDeployment(other.getDeployment()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getDeploymentFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.apihub.v1.Deployment deployment_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.Deployment, com.google.cloud.apihub.v1.Deployment.Builder, com.google.cloud.apihub.v1.DeploymentOrBuilder> deploymentBuilder_; /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the deployment field is set. */ public boolean hasDeployment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The deployment. 
*/ public com.google.cloud.apihub.v1.Deployment getDeployment() { if (deploymentBuilder_ == null) { return deployment_ == null ? com.google.cloud.apihub.v1.Deployment.getDefaultInstance() : deployment_; } else { return deploymentBuilder_.getMessage(); } } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDeployment(com.google.cloud.apihub.v1.Deployment value) { if (deploymentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } deployment_ = value; } else { deploymentBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDeployment(com.google.cloud.apihub.v1.Deployment.Builder builderForValue) { if (deploymentBuilder_ == null) { deployment_ = builderForValue.build(); } else { deploymentBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. 
* Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeDeployment(com.google.cloud.apihub.v1.Deployment value) { if (deploymentBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && deployment_ != null && deployment_ != com.google.cloud.apihub.v1.Deployment.getDefaultInstance()) { getDeploymentBuilder().mergeFrom(value); } else { deployment_ = value; } } else { deploymentBuilder_.mergeFrom(value); } if (deployment_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearDeployment() { bitField0_ = (bitField0_ & ~0x00000001); deployment_ = null; if (deploymentBuilder_ != null) { deploymentBuilder_.dispose(); deploymentBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.apihub.v1.Deployment.Builder getDeploymentBuilder() { bitField0_ |= 0x00000001; onChanged(); return getDeploymentFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. 
* Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.apihub.v1.DeploymentOrBuilder getDeploymentOrBuilder() { if (deploymentBuilder_ != null) { return deploymentBuilder_.getMessageOrBuilder(); } else { return deployment_ == null ? com.google.cloud.apihub.v1.Deployment.getDefaultInstance() : deployment_; } } /** * * * <pre> * Required. The deployment resource to update. * * The deployment resource's `name` field is used to identify the deployment * resource to update. * Format: `projects/{project}/locations/{location}/deployments/{deployment}` * </pre> * * <code> * .google.cloud.apihub.v1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.Deployment, com.google.cloud.apihub.v1.Deployment.Builder, com.google.cloud.apihub.v1.DeploymentOrBuilder> getDeploymentFieldBuilder() { if (deploymentBuilder_ == null) { deploymentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.Deployment, com.google.cloud.apihub.v1.Deployment.Builder, com.google.cloud.apihub.v1.DeploymentOrBuilder>( getDeployment(), getParentForChildren(), isClean()); deployment_ = null; } return deploymentBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.UpdateDeploymentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.UpdateDeploymentRequest) private static final com.google.cloud.apihub.v1.UpdateDeploymentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.UpdateDeploymentRequest(); } public static com.google.cloud.apihub.v1.UpdateDeploymentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateDeploymentRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateDeploymentRequest>() { @java.lang.Override public UpdateDeploymentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateDeploymentRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateDeploymentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.apihub.v1.UpdateDeploymentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,165
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/UpdateIndexEndpointRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/index_endpoint_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Request message for * [IndexEndpointService.UpdateIndexEndpoint][google.cloud.aiplatform.v1.IndexEndpointService.UpdateIndexEndpoint]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.UpdateIndexEndpointRequest} */ public final class UpdateIndexEndpointRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.UpdateIndexEndpointRequest) UpdateIndexEndpointRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateIndexEndpointRequest.newBuilder() to construct. 
private UpdateIndexEndpointRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateIndexEndpointRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateIndexEndpointRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateIndexEndpointRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateIndexEndpointRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest.class, com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest.Builder.class); } private int bitField0_; public static final int INDEX_ENDPOINT_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1.IndexEndpoint indexEndpoint_; /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the indexEndpoint field is set. */ @java.lang.Override public boolean hasIndexEndpoint() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The indexEndpoint. */ @java.lang.Override public com.google.cloud.aiplatform.v1.IndexEndpoint getIndexEndpoint() { return indexEndpoint_ == null ? 
com.google.cloud.aiplatform.v1.IndexEndpoint.getDefaultInstance() : indexEndpoint_; } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.IndexEndpointOrBuilder getIndexEndpointOrBuilder() { return indexEndpoint_ == null ? com.google.cloud.aiplatform.v1.IndexEndpoint.getDefaultInstance() : indexEndpoint_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getIndexEndpoint()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getIndexEndpoint()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest other = (com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest) obj; if (hasIndexEndpoint() != other.hasIndexEndpoint()) return false; if (hasIndexEndpoint()) { if (!getIndexEndpoint().equals(other.getIndexEndpoint())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasIndexEndpoint()) { hash = (37 * hash) + INDEX_ENDPOINT_FIELD_NUMBER; hash = (53 * hash) + getIndexEndpoint().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [IndexEndpointService.UpdateIndexEndpoint][google.cloud.aiplatform.v1.IndexEndpointService.UpdateIndexEndpoint]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.UpdateIndexEndpointRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.UpdateIndexEndpointRequest) com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateIndexEndpointRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateIndexEndpointRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest.class, com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getIndexEndpointFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; indexEndpoint_ = null; if 
(indexEndpointBuilder_ != null) { indexEndpointBuilder_.dispose(); indexEndpointBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateIndexEndpointRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest build() { com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest buildPartial() { com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest result = new com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.indexEndpoint_ = indexEndpointBuilder_ == null ? indexEndpoint_ : indexEndpointBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest) { return mergeFrom((com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest other) { if (other == com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest.getDefaultInstance()) return this; if (other.hasIndexEndpoint()) { mergeIndexEndpoint(other.getIndexEndpoint()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getIndexEndpointFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1.IndexEndpoint indexEndpoint_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.IndexEndpoint, com.google.cloud.aiplatform.v1.IndexEndpoint.Builder, com.google.cloud.aiplatform.v1.IndexEndpointOrBuilder> indexEndpointBuilder_; /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the indexEndpoint field is set. */ public boolean hasIndexEndpoint() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The indexEndpoint. */ public com.google.cloud.aiplatform.v1.IndexEndpoint getIndexEndpoint() { if (indexEndpointBuilder_ == null) { return indexEndpoint_ == null ? 
com.google.cloud.aiplatform.v1.IndexEndpoint.getDefaultInstance() : indexEndpoint_; } else { return indexEndpointBuilder_.getMessage(); } } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIndexEndpoint(com.google.cloud.aiplatform.v1.IndexEndpoint value) { if (indexEndpointBuilder_ == null) { if (value == null) { throw new NullPointerException(); } indexEndpoint_ = value; } else { indexEndpointBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIndexEndpoint( com.google.cloud.aiplatform.v1.IndexEndpoint.Builder builderForValue) { if (indexEndpointBuilder_ == null) { indexEndpoint_ = builderForValue.build(); } else { indexEndpointBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeIndexEndpoint(com.google.cloud.aiplatform.v1.IndexEndpoint value) { if (indexEndpointBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && indexEndpoint_ != null && indexEndpoint_ != com.google.cloud.aiplatform.v1.IndexEndpoint.getDefaultInstance()) { getIndexEndpointBuilder().mergeFrom(value); } else { indexEndpoint_ = value; } } else { indexEndpointBuilder_.mergeFrom(value); } if (indexEndpoint_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. 
The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearIndexEndpoint() { bitField0_ = (bitField0_ & ~0x00000001); indexEndpoint_ = null; if (indexEndpointBuilder_ != null) { indexEndpointBuilder_.dispose(); indexEndpointBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.IndexEndpoint.Builder getIndexEndpointBuilder() { bitField0_ |= 0x00000001; onChanged(); return getIndexEndpointFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.IndexEndpointOrBuilder getIndexEndpointOrBuilder() { if (indexEndpointBuilder_ != null) { return indexEndpointBuilder_.getMessageOrBuilder(); } else { return indexEndpoint_ == null ? com.google.cloud.aiplatform.v1.IndexEndpoint.getDefaultInstance() : indexEndpoint_; } } /** * * * <pre> * Required. The IndexEndpoint which replaces the resource on the server. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.IndexEndpoint index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.IndexEndpoint, com.google.cloud.aiplatform.v1.IndexEndpoint.Builder, com.google.cloud.aiplatform.v1.IndexEndpointOrBuilder> getIndexEndpointFieldBuilder() { if (indexEndpointBuilder_ == null) { indexEndpointBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.IndexEndpoint, com.google.cloud.aiplatform.v1.IndexEndpoint.Builder, com.google.cloud.aiplatform.v1.IndexEndpointOrBuilder>( getIndexEndpoint(), getParentForChildren(), isClean()); indexEndpoint_ = null; } return indexEndpointBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The update mask applies to the resource. 
See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.UpdateIndexEndpointRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.UpdateIndexEndpointRequest) private static final com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest(); } public static com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateIndexEndpointRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateIndexEndpointRequest>() { @java.lang.Override public UpdateIndexEndpointRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateIndexEndpointRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateIndexEndpointRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateIndexEndpointRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,132
java-cloudsecuritycompliance/google-cloud-cloudsecuritycompliance/src/test/java/com/google/cloud/cloudsecuritycompliance/v1/DeploymentClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.cloudsecuritycompliance.v1; import static com.google.cloud.cloudsecuritycompliance.v1.DeploymentClient.ListCloudControlDeploymentsPagedResponse; import static com.google.cloud.cloudsecuritycompliance.v1.DeploymentClient.ListFrameworkDeploymentsPagedResponse; import static com.google.cloud.cloudsecuritycompliance.v1.DeploymentClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; import com.google.protobuf.Empty; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import 
java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class DeploymentClientTest { private static MockDeployment mockDeployment; private static MockLocations mockLocations; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private DeploymentClient client; @BeforeClass public static void startStaticServer() { mockDeployment = new MockDeployment(); mockLocations = new MockLocations(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockDeployment, mockLocations)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); DeploymentSettings settings = DeploymentSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = DeploymentClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void createFrameworkDeploymentTest() throws Exception { FrameworkDeployment expectedResponse = FrameworkDeployment.newBuilder() .setName( FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]") .toString()) .setTargetResourceConfig(TargetResourceConfig.newBuilder().build()) .setComputedTargetResource("computedTargetResource-479139540") .setFramework(FrameworkReference.newBuilder().build()) .setDescription("description-1724546052") .addAllCloudControlMetadata(new ArrayList<CloudControlMetadata>()) .setDeploymentState(DeploymentState.forNumber(0)) 
.setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .setTargetResourceDisplayName("targetResourceDisplayName-1474402258") .addAllCloudControlDeploymentReferences( new ArrayList<CloudControlDeploymentReference>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createFrameworkDeploymentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockDeployment.addResponse(resultOperation); OrganizationLocationName parent = OrganizationLocationName.of("[ORGANIZATION]", "[LOCATION]"); FrameworkDeployment frameworkDeployment = FrameworkDeployment.newBuilder().build(); String frameworkDeploymentId = "frameworkDeploymentId-1244700706"; FrameworkDeployment actualResponse = client .createFrameworkDeploymentAsync(parent, frameworkDeployment, frameworkDeploymentId) .get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateFrameworkDeploymentRequest actualRequest = ((CreateFrameworkDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(frameworkDeployment, actualRequest.getFrameworkDeployment()); Assert.assertEquals(frameworkDeploymentId, actualRequest.getFrameworkDeploymentId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createFrameworkDeploymentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { OrganizationLocationName parent = OrganizationLocationName.of("[ORGANIZATION]", "[LOCATION]"); FrameworkDeployment frameworkDeployment = FrameworkDeployment.newBuilder().build(); String frameworkDeploymentId = 
"frameworkDeploymentId-1244700706"; client .createFrameworkDeploymentAsync(parent, frameworkDeployment, frameworkDeploymentId) .get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void createFrameworkDeploymentTest2() throws Exception { FrameworkDeployment expectedResponse = FrameworkDeployment.newBuilder() .setName( FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]") .toString()) .setTargetResourceConfig(TargetResourceConfig.newBuilder().build()) .setComputedTargetResource("computedTargetResource-479139540") .setFramework(FrameworkReference.newBuilder().build()) .setDescription("description-1724546052") .addAllCloudControlMetadata(new ArrayList<CloudControlMetadata>()) .setDeploymentState(DeploymentState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .setTargetResourceDisplayName("targetResourceDisplayName-1474402258") .addAllCloudControlDeploymentReferences( new ArrayList<CloudControlDeploymentReference>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createFrameworkDeploymentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockDeployment.addResponse(resultOperation); String parent = "parent-995424086"; FrameworkDeployment frameworkDeployment = FrameworkDeployment.newBuilder().build(); String frameworkDeploymentId = "frameworkDeploymentId-1244700706"; FrameworkDeployment actualResponse = client .createFrameworkDeploymentAsync(parent, frameworkDeployment, frameworkDeploymentId) .get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); 
Assert.assertEquals(1, actualRequests.size()); CreateFrameworkDeploymentRequest actualRequest = ((CreateFrameworkDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertEquals(frameworkDeployment, actualRequest.getFrameworkDeployment()); Assert.assertEquals(frameworkDeploymentId, actualRequest.getFrameworkDeploymentId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createFrameworkDeploymentExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { String parent = "parent-995424086"; FrameworkDeployment frameworkDeployment = FrameworkDeployment.newBuilder().build(); String frameworkDeploymentId = "frameworkDeploymentId-1244700706"; client .createFrameworkDeploymentAsync(parent, frameworkDeployment, frameworkDeploymentId) .get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteFrameworkDeploymentTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteFrameworkDeploymentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockDeployment.addResponse(resultOperation); FrameworkDeploymentName name = FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]"); client.deleteFrameworkDeploymentAsync(name).get(); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); 
DeleteFrameworkDeploymentRequest actualRequest = ((DeleteFrameworkDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteFrameworkDeploymentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { FrameworkDeploymentName name = FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]"); client.deleteFrameworkDeploymentAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteFrameworkDeploymentTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteFrameworkDeploymentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockDeployment.addResponse(resultOperation); String name = "name3373707"; client.deleteFrameworkDeploymentAsync(name).get(); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteFrameworkDeploymentRequest actualRequest = ((DeleteFrameworkDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteFrameworkDeploymentExceptionTest2() throws Exception { 
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { String name = "name3373707"; client.deleteFrameworkDeploymentAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void getFrameworkDeploymentTest() throws Exception { FrameworkDeployment expectedResponse = FrameworkDeployment.newBuilder() .setName( FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]") .toString()) .setTargetResourceConfig(TargetResourceConfig.newBuilder().build()) .setComputedTargetResource("computedTargetResource-479139540") .setFramework(FrameworkReference.newBuilder().build()) .setDescription("description-1724546052") .addAllCloudControlMetadata(new ArrayList<CloudControlMetadata>()) .setDeploymentState(DeploymentState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .setTargetResourceDisplayName("targetResourceDisplayName-1474402258") .addAllCloudControlDeploymentReferences( new ArrayList<CloudControlDeploymentReference>()) .build(); mockDeployment.addResponse(expectedResponse); FrameworkDeploymentName name = FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]"); FrameworkDeployment actualResponse = client.getFrameworkDeployment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetFrameworkDeploymentRequest actualRequest = ((GetFrameworkDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), 
actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getFrameworkDeploymentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { FrameworkDeploymentName name = FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]"); client.getFrameworkDeployment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getFrameworkDeploymentTest2() throws Exception { FrameworkDeployment expectedResponse = FrameworkDeployment.newBuilder() .setName( FrameworkDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[FRAMEWORK_DEPLOYMENT]") .toString()) .setTargetResourceConfig(TargetResourceConfig.newBuilder().build()) .setComputedTargetResource("computedTargetResource-479139540") .setFramework(FrameworkReference.newBuilder().build()) .setDescription("description-1724546052") .addAllCloudControlMetadata(new ArrayList<CloudControlMetadata>()) .setDeploymentState(DeploymentState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .setTargetResourceDisplayName("targetResourceDisplayName-1474402258") .addAllCloudControlDeploymentReferences( new ArrayList<CloudControlDeploymentReference>()) .build(); mockDeployment.addResponse(expectedResponse); String name = "name3373707"; FrameworkDeployment actualResponse = client.getFrameworkDeployment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetFrameworkDeploymentRequest actualRequest = ((GetFrameworkDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(name, 
actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getFrameworkDeploymentExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { String name = "name3373707"; client.getFrameworkDeployment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listFrameworkDeploymentsTest() throws Exception { FrameworkDeployment responsesElement = FrameworkDeployment.newBuilder().build(); ListFrameworkDeploymentsResponse expectedResponse = ListFrameworkDeploymentsResponse.newBuilder() .setNextPageToken("") .addAllFrameworkDeployments(Arrays.asList(responsesElement)) .build(); mockDeployment.addResponse(expectedResponse); OrganizationLocationName parent = OrganizationLocationName.of("[ORGANIZATION]", "[LOCATION]"); ListFrameworkDeploymentsPagedResponse pagedListResponse = client.listFrameworkDeployments(parent); List<FrameworkDeployment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getFrameworkDeploymentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListFrameworkDeploymentsRequest actualRequest = ((ListFrameworkDeploymentsRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listFrameworkDeploymentsExceptionTest() throws Exception { StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { OrganizationLocationName parent = OrganizationLocationName.of("[ORGANIZATION]", "[LOCATION]"); client.listFrameworkDeployments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listFrameworkDeploymentsTest2() throws Exception { FrameworkDeployment responsesElement = FrameworkDeployment.newBuilder().build(); ListFrameworkDeploymentsResponse expectedResponse = ListFrameworkDeploymentsResponse.newBuilder() .setNextPageToken("") .addAllFrameworkDeployments(Arrays.asList(responsesElement)) .build(); mockDeployment.addResponse(expectedResponse); String parent = "parent-995424086"; ListFrameworkDeploymentsPagedResponse pagedListResponse = client.listFrameworkDeployments(parent); List<FrameworkDeployment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getFrameworkDeploymentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListFrameworkDeploymentsRequest actualRequest = ((ListFrameworkDeploymentsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listFrameworkDeploymentsExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { String parent = "parent-995424086"; client.listFrameworkDeployments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getCloudControlDeploymentTest() throws Exception { CloudControlDeployment expectedResponse = CloudControlDeployment.newBuilder() .setName( CloudControlDeploymentName.of( "[ORGANIZATION]", "[LOCATION]", "[CLOUD_CONTROL_DEPLOYMENT]") .toString()) .setTargetResourceConfig(TargetResourceConfig.newBuilder().build()) .setTargetResource("targetResource-1933150017") .setCloudControlMetadata(CloudControlMetadata.newBuilder().build()) .setDescription("description-1724546052") .setDeploymentState(DeploymentState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .setParameterSubstitutedCloudControl(CloudControl.newBuilder().build()) .addAllFrameworkDeploymentReferences(new ArrayList<FrameworkDeploymentReference>()) .setTargetResourceDisplayName("targetResourceDisplayName-1474402258") .build(); mockDeployment.addResponse(expectedResponse); CloudControlDeploymentName name = CloudControlDeploymentName.of("[ORGANIZATION]", "[LOCATION]", "[CLOUD_CONTROL_DEPLOYMENT]"); CloudControlDeployment actualResponse = client.getCloudControlDeployment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetCloudControlDeploymentRequest actualRequest = ((GetCloudControlDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getCloudControlDeploymentExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { CloudControlDeploymentName name = CloudControlDeploymentName.of( "[ORGANIZATION]", "[LOCATION]", 
"[CLOUD_CONTROL_DEPLOYMENT]"); client.getCloudControlDeployment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getCloudControlDeploymentTest2() throws Exception { CloudControlDeployment expectedResponse = CloudControlDeployment.newBuilder() .setName( CloudControlDeploymentName.of( "[ORGANIZATION]", "[LOCATION]", "[CLOUD_CONTROL_DEPLOYMENT]") .toString()) .setTargetResourceConfig(TargetResourceConfig.newBuilder().build()) .setTargetResource("targetResource-1933150017") .setCloudControlMetadata(CloudControlMetadata.newBuilder().build()) .setDescription("description-1724546052") .setDeploymentState(DeploymentState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .setParameterSubstitutedCloudControl(CloudControl.newBuilder().build()) .addAllFrameworkDeploymentReferences(new ArrayList<FrameworkDeploymentReference>()) .setTargetResourceDisplayName("targetResourceDisplayName-1474402258") .build(); mockDeployment.addResponse(expectedResponse); String name = "name3373707"; CloudControlDeployment actualResponse = client.getCloudControlDeployment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetCloudControlDeploymentRequest actualRequest = ((GetCloudControlDeploymentRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getCloudControlDeploymentExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { String name = "name3373707"; 
client.getCloudControlDeployment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listCloudControlDeploymentsTest() throws Exception { CloudControlDeployment responsesElement = CloudControlDeployment.newBuilder().build(); ListCloudControlDeploymentsResponse expectedResponse = ListCloudControlDeploymentsResponse.newBuilder() .setNextPageToken("") .addAllCloudControlDeployments(Arrays.asList(responsesElement)) .build(); mockDeployment.addResponse(expectedResponse); OrganizationLocationName parent = OrganizationLocationName.of("[ORGANIZATION]", "[LOCATION]"); ListCloudControlDeploymentsPagedResponse pagedListResponse = client.listCloudControlDeployments(parent); List<CloudControlDeployment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getCloudControlDeploymentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListCloudControlDeploymentsRequest actualRequest = ((ListCloudControlDeploymentsRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listCloudControlDeploymentsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { OrganizationLocationName parent = OrganizationLocationName.of("[ORGANIZATION]", "[LOCATION]"); client.listCloudControlDeployments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listCloudControlDeploymentsTest2() throws Exception { CloudControlDeployment responsesElement = CloudControlDeployment.newBuilder().build(); ListCloudControlDeploymentsResponse expectedResponse = ListCloudControlDeploymentsResponse.newBuilder() .setNextPageToken("") .addAllCloudControlDeployments(Arrays.asList(responsesElement)) .build(); mockDeployment.addResponse(expectedResponse); String parent = "parent-995424086"; ListCloudControlDeploymentsPagedResponse pagedListResponse = client.listCloudControlDeployments(parent); List<CloudControlDeployment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getCloudControlDeploymentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockDeployment.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListCloudControlDeploymentsRequest actualRequest = ((ListCloudControlDeploymentsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listCloudControlDeploymentsExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockDeployment.addException(exception); try { String parent = "parent-995424086"; client.listCloudControlDeployments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listLocationsTest() throws Exception { Location responsesElement = Location.newBuilder().build(); ListLocationsResponse expectedResponse = ListLocationsResponse.newBuilder() .setNextPageToken("") .addAllLocations(Arrays.asList(responsesElement)) .build(); mockLocations.addResponse(expectedResponse); ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("name3373707") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); ListLocationsPagedResponse pagedListResponse = client.listLocations(request); List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockLocations.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0)); Assert.assertEquals(request.getName(), actualRequest.getName()); Assert.assertEquals(request.getFilter(), actualRequest.getFilter()); Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize()); Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listLocationsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockLocations.addException(exception); try { ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("name3373707") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); client.listLocations(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getLocationTest() throws Exception { Location expectedResponse = Location.newBuilder() .setName("name3373707") .setLocationId("locationId1541836720") .setDisplayName("displayName1714148973") .putAllLabels(new HashMap<String, String>()) .setMetadata(Any.newBuilder().build()) .build(); mockLocations.addResponse(expectedResponse); GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); Location actualResponse = client.getLocation(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockLocations.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0)); Assert.assertEquals(request.getName(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getLocationExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockLocations.addException(exception); try { GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); client.getLocation(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
googleapis/google-cloud-java
35,162
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpec.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/explanation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Specification of Model explanation. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationSpec} */ public final class ExplanationSpec extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExplanationSpec) ExplanationSpecOrBuilder { private static final long serialVersionUID = 0L; // Use ExplanationSpec.newBuilder() to construct. 
private ExplanationSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExplanationSpec() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ExplanationSpec(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ExplanationSpec.class, com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder.class); } private int bitField0_; public static final int PARAMETERS_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1beta1.ExplanationParameters parameters_; /** * * * <pre> * Required. Parameters that configure explaining of the Model's predictions. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the parameters field is set. */ @java.lang.Override public boolean hasParameters() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Parameters that configure explaining of the Model's predictions. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The parameters. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters() { return parameters_ == null ? 
com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance() : parameters_; } /** * * * <pre> * Required. Parameters that configure explaining of the Model's predictions. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder getParametersOrBuilder() { return parameters_ == null ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance() : parameters_; } public static final int METADATA_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1beta1.ExplanationMetadata metadata_; /** * * * <pre> * Optional. Metadata describing the Model's input and output for explanation. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationMetadata metadata = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the metadata field is set. */ @java.lang.Override public boolean hasMetadata() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Metadata describing the Model's input and output for explanation. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationMetadata metadata = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The metadata. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationMetadata getMetadata() { return metadata_ == null ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.getDefaultInstance() : metadata_; } /** * * * <pre> * Optional. Metadata describing the Model's input and output for explanation. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationMetadata metadata = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOrBuilder getMetadataOrBuilder() { return metadata_ == null ? 
com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.getDefaultInstance() : metadata_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getParameters()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getMetadata()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getParameters()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMetadata()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ExplanationSpec)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.ExplanationSpec other = (com.google.cloud.aiplatform.v1beta1.ExplanationSpec) obj; if (hasParameters() != other.hasParameters()) return false; if (hasParameters()) { if (!getParameters().equals(other.getParameters())) return false; } if (hasMetadata() != other.hasMetadata()) return false; if (hasMetadata()) { if (!getMetadata().equals(other.getMetadata())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); 
if (hasParameters()) { hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + getParameters().hashCode(); } if (hasMetadata()) { hash = (37 * hash) + METADATA_FIELD_NUMBER; hash = (53 * hash) + getMetadata().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.ExplanationSpec prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Specification of Model explanation. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationSpec} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ExplanationSpec) com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ExplanationSpec.class, com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ExplanationSpec.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getParametersFieldBuilder(); getMetadataFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parameters_ = null; if (parametersBuilder_ != null) { parametersBuilder_.dispose(); parametersBuilder_ = null; } metadata_ = null; if (metadataBuilder_ != null) { metadataBuilder_.dispose(); 
metadataBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpec_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationSpec build() { com.google.cloud.aiplatform.v1beta1.ExplanationSpec result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationSpec buildPartial() { com.google.cloud.aiplatform.v1beta1.ExplanationSpec result = new com.google.cloud.aiplatform.v1beta1.ExplanationSpec(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ExplanationSpec result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.parameters_ = parametersBuilder_ == null ? parameters_ : parametersBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.metadata_ = metadataBuilder_ == null ? 
metadata_ : metadataBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ExplanationSpec) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ExplanationSpec) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ExplanationSpec other) { if (other == com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance()) return this; if (other.hasParameters()) { mergeParameters(other.getParameters()); } if (other.hasMetadata()) { mergeMetadata(other.getMetadata()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getParametersFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getMetadataFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1beta1.ExplanationParameters parameters_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ExplanationParameters, com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder, com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder> parametersBuilder_; /** * * * <pre> * Required. Parameters that configure explaining of the Model's predictions. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the parameters field is set. */ public boolean hasParameters() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Parameters that configure explaining of the Model's predictions. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The parameters. */ public com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters() { if (parametersBuilder_ == null) { return parameters_ == null ? 
com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance() : parameters_;
      } else {
        return parametersBuilder_.getMessage();
      }
    }

    // NOTE(review): protoc-generated code ("DO NOT EDIT" per this file's header) — hand edits are
    // overwritten on regeneration; change the .proto or the generator instead. This span begins
    // mid-method: the lines above are the tail of getParameters().

    /**
     * Sets {@code parameters} (proto field 1, REQUIRED): parameters that configure explaining of
     * the Model's predictions.
     */
    public Builder setParameters(com.google.cloud.aiplatform.v1beta1.ExplanationParameters value) {
      if (parametersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        parameters_ = value;
      } else {
        parametersBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001; // bit 0 of bitField0_ tracks presence of 'parameters'
      onChanged();
      return this;
    }

    /** Sets {@code parameters} (proto field 1, REQUIRED) from a builder. */
    public Builder setParameters(
        com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder builderForValue) {
      if (parametersBuilder_ == null) {
        parameters_ = builderForValue.build();
      } else {
        parametersBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Merges {@code value} into {@code parameters} (proto field 1, REQUIRED); replaces the field
     * outright when it is currently unset or still the default instance.
     */
    public Builder mergeParameters(
        com.google.cloud.aiplatform.v1beta1.ExplanationParameters value) {
      if (parametersBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && parameters_ != null
            && parameters_
                != com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance()) {
          getParametersBuilder().mergeFrom(value);
        } else {
          parameters_ = value;
        }
      } else {
        parametersBuilder_.mergeFrom(value);
      }
      if (parameters_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /** Clears {@code parameters} (proto field 1, REQUIRED) and disposes any nested builder. */
    public Builder clearParameters() {
      bitField0_ = (bitField0_ & ~0x00000001);
      parameters_ = null;
      if (parametersBuilder_ != null) {
        parametersBuilder_.dispose();
        parametersBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     * Returns a mutable sub-builder for {@code parameters} (proto field 1, REQUIRED); marks the
     * field as set.
     */
    public com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder
        getParametersBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getParametersFieldBuilder().getBuilder();
    }

    /** Returns a read-only view of {@code parameters} (proto field 1, REQUIRED). */
    public com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder
        getParametersOrBuilder() {
      if (parametersBuilder_ != null) {
        return parametersBuilder_.getMessageOrBuilder();
      } else {
        return parameters_ == null
            ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance()
            : parameters_;
      }
    }

    /** Lazily creates the nested field builder for {@code parameters} (proto field 1, REQUIRED). */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.ExplanationParameters,
            com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder,
            com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder>
        getParametersFieldBuilder() {
      if (parametersBuilder_ == null) {
        parametersBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.ExplanationParameters,
                com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder,
                com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder>(
                getParameters(), getParentForChildren(), isClean());
        parameters_ = null; // the builder now owns the message; drop the plain-field copy
      }
      return parametersBuilder_;
    }

    private com.google.cloud.aiplatform.v1beta1.ExplanationMetadata metadata_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.ExplanationMetadata,
            com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.Builder,
            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOrBuilder>
        metadataBuilder_;

    /**
     * Returns whether {@code metadata} (proto field 2, OPTIONAL) is set: metadata describing the
     * Model's input and output for explanation.
     */
    public boolean hasMetadata() {
      return ((bitField0_ & 0x00000002) != 0); // bit 1 of bitField0_ tracks presence of 'metadata'
    }

    /** Returns {@code metadata} (proto field 2, OPTIONAL), or the default instance when unset. */
    public com.google.cloud.aiplatform.v1beta1.ExplanationMetadata getMetadata() {
      if (metadataBuilder_ == null) {
        return metadata_ == null
            ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.getDefaultInstance()
            : metadata_;
      } else {
        return metadataBuilder_.getMessage();
      }
    }

    /** Sets {@code metadata} (proto field 2, OPTIONAL). */
    public Builder setMetadata(com.google.cloud.aiplatform.v1beta1.ExplanationMetadata value) {
      if (metadataBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        metadata_ = value;
      } else {
        metadataBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /** Sets {@code metadata} (proto field 2, OPTIONAL) from a builder. */
    public Builder setMetadata(
        com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.Builder builderForValue) {
      if (metadataBuilder_ == null) {
        metadata_ = builderForValue.build();
      } else {
        metadataBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Merges {@code value} into {@code metadata} (proto field 2, OPTIONAL); replaces the field
     * outright when it is currently unset or still the default instance.
     */
    public Builder mergeMetadata(com.google.cloud.aiplatform.v1beta1.ExplanationMetadata value) {
      if (metadataBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && metadata_ != null
            && metadata_
                != com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.getDefaultInstance()) {
          getMetadataBuilder().mergeFrom(value);
        } else {
          metadata_ = value;
        }
      } else {
        metadataBuilder_.mergeFrom(value);
      }
      if (metadata_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /** Clears {@code metadata} (proto field 2, OPTIONAL) and disposes any nested builder. */
    public Builder clearMetadata() {
      bitField0_ = (bitField0_ & ~0x00000002);
      metadata_ = null;
      if (metadataBuilder_ != null) {
        metadataBuilder_.dispose();
        metadataBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     * Returns a mutable sub-builder for {@code metadata} (proto field 2, OPTIONAL); marks the
     * field as set.
     */
    public com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.Builder getMetadataBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getMetadataFieldBuilder().getBuilder();
    }

    /** Returns a read-only view of {@code metadata} (proto field 2, OPTIONAL). */
    public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOrBuilder getMetadataOrBuilder() {
      if (metadataBuilder_ != null) {
        return metadataBuilder_.getMessageOrBuilder();
      } else {
        return metadata_ == null
            ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.getDefaultInstance()
            : metadata_;
      }
    }

    /** Lazily creates the nested field builder for {@code metadata} (proto field 2, OPTIONAL). */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.ExplanationMetadata,
            com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.Builder,
            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOrBuilder>
        getMetadataFieldBuilder() {
      if (metadataBuilder_ == null) {
        metadataBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.ExplanationMetadata,
                com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.Builder,
                com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOrBuilder>(
                getMetadata(), getParentForChildren(), isClean());
        metadata_ = null; // the builder now owns the message; drop the plain-field copy
      }
      return metadataBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ExplanationSpec)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ExplanationSpec)
  private static final com.google.cloud.aiplatform.v1beta1.ExplanationSpec DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ExplanationSpec();
  }

  /** Returns the singleton default (all-fields-unset) instance of {@code ExplanationSpec}. */
  public static com.google.cloud.aiplatform.v1beta1.ExplanationSpec getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to the Builder; on any parse failure it attaches the partially built
  // message via setUnfinishedMessage() so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<ExplanationSpec> PARSER =
      new com.google.protobuf.AbstractParser<ExplanationSpec>() {
        @java.lang.Override
        public ExplanationSpec parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ExplanationSpec> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ExplanationSpec> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/rocketmq
35,393
remoting/src/main/java/org/apache/rocketmq/remoting/netty/NettyRemotingAbstract.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.remoting.netty; import io.netty.channel.Channel; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslHandler; import io.netty.util.concurrent.Future; import io.opentelemetry.api.common.AttributesBuilder; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map.Entry; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import javax.annotation.Nullable; import org.apache.rocketmq.common.AbortProcessException; import 
org.apache.rocketmq.common.MQVersion; import org.apache.rocketmq.common.Pair; import org.apache.rocketmq.common.ServiceThread; import org.apache.rocketmq.common.UtilAll; import org.apache.rocketmq.common.constant.LoggerName; import org.apache.rocketmq.common.utils.ExceptionUtils; import org.apache.rocketmq.logging.org.slf4j.Logger; import org.apache.rocketmq.logging.org.slf4j.LoggerFactory; import org.apache.rocketmq.remoting.ChannelEventListener; import org.apache.rocketmq.remoting.InvokeCallback; import org.apache.rocketmq.remoting.RPCHook; import org.apache.rocketmq.remoting.common.RemotingHelper; import org.apache.rocketmq.remoting.common.SemaphoreReleaseOnlyOnce; import org.apache.rocketmq.remoting.exception.RemotingSendRequestException; import org.apache.rocketmq.remoting.exception.RemotingTimeoutException; import org.apache.rocketmq.remoting.exception.RemotingTooMuchRequestException; import org.apache.rocketmq.remoting.metrics.RemotingMetricsManager; import org.apache.rocketmq.remoting.pipeline.RequestPipeline; import org.apache.rocketmq.remoting.protocol.RemotingCommand; import org.apache.rocketmq.remoting.protocol.RemotingSysResponseCode; import org.apache.rocketmq.remoting.protocol.ResponseCode; import static org.apache.rocketmq.remoting.metrics.RemotingMetricsConstant.LABEL_IS_LONG_POLLING; import static org.apache.rocketmq.remoting.metrics.RemotingMetricsConstant.LABEL_REQUEST_CODE; import static org.apache.rocketmq.remoting.metrics.RemotingMetricsConstant.LABEL_RESPONSE_CODE; import static org.apache.rocketmq.remoting.metrics.RemotingMetricsConstant.LABEL_RESULT; import static org.apache.rocketmq.remoting.metrics.RemotingMetricsConstant.RESULT_ONEWAY; import static org.apache.rocketmq.remoting.metrics.RemotingMetricsConstant.RESULT_PROCESS_REQUEST_FAILED; import static org.apache.rocketmq.remoting.metrics.RemotingMetricsConstant.RESULT_WRITE_CHANNEL_FAILED; public abstract class NettyRemotingAbstract { /** * Remoting logger instance. 
*/
    private static final Logger log = LoggerFactory.getLogger(LoggerName.ROCKETMQ_REMOTING_NAME);

    /**
     * Semaphore to limit maximum number of on-going one-way requests, which protects system memory footprint.
     */
    protected final Semaphore semaphoreOneway;

    /**
     * Semaphore to limit maximum number of on-going asynchronous requests, which protects system memory footprint.
     */
    protected final Semaphore semaphoreAsync;

    /**
     * This map caches all on-going requests, keyed by the request's opaque id; entries are removed
     * when a response arrives, the send fails, or the periodic scan expires them.
     */
    protected final ConcurrentMap<Integer /* opaque */, ResponseFuture> responseTable =
        new ConcurrentHashMap<>(256);

    /**
     * This container holds all processors per request code, aka, for each incoming request, we may look up the
     * responding processor in this map to handle the request.
     */
    protected final HashMap<Integer/* request code */, Pair<NettyRequestProcessor, ExecutorService>> processorTable =
        new HashMap<>(64);

    /**
     * Executor to feed netty events to user defined {@link ChannelEventListener}.
     */
    protected final NettyEventExecutor nettyEventExecutor = new NettyEventExecutor();

    /**
     * The default request processor to use in case there is no exact match in {@link #processorTable} per request
     * code.
     */
    protected Pair<NettyRequestProcessor, ExecutorService> defaultRequestProcessorPair;

    /**
     * SSL context via which to create {@link SslHandler}.
     */
    protected volatile SslContext sslContext;

    /**
     * Custom RPC hooks, invoked before and after each request is processed.
     * NOTE(review): backed by a plain ArrayList; registration appears intended for startup time —
     * confirm no concurrent registration happens at runtime.
     */
    protected List<RPCHook> rpcHooks = new ArrayList<>();

    protected RequestPipeline requestPipeline;

    // Flipped during shutdown; newer clients (see processRequestCommand) receive GO_AWAY responses.
    protected AtomicBoolean isShuttingDown = new AtomicBoolean(false);

    /**
     * Remoting metrics manager instance for this remoting server. May be null; all call sites
     * null-check before recording.
     */
    protected RemotingMetricsManager remotingMetricsManager;

    static {
        NettyLogger.initNettyLogger();
    }

    /**
     * Constructor, specifying capacity of one-way and asynchronous semaphores.
     * Both semaphores are created fair so waiting callers acquire in FIFO order.
     *
     * @param permitsOneway Number of permits for one-way requests.
     * @param permitsAsync Number of permits for asynchronous requests.
     */
    public NettyRemotingAbstract(final int permitsOneway, final int permitsAsync) {
        this.semaphoreOneway = new Semaphore(permitsOneway, true);
        this.semaphoreAsync = new Semaphore(permitsAsync, true);
    }

    /**
     * Custom channel event listener.
     *
     * @return custom channel event listener if defined; null otherwise.
     */
    public abstract ChannelEventListener getChannelEventListener();

    /**
     * Set the remoting metrics manager for this remoting server.
     *
     * @param remotingMetricsManager the remoting metrics manager instance
     */
    public void setRemotingMetricsManager(RemotingMetricsManager remotingMetricsManager) {
        this.remotingMetricsManager = remotingMetricsManager;
    }

    /**
     * Get the remoting metrics manager for this remoting server.
     *
     * @return the remoting metrics manager instance
     */
    public RemotingMetricsManager getRemotingMetricsManager() {
        return remotingMetricsManager;
    }

    /**
     * Put a netty event to the executor.
     *
     * @param event Netty event instance.
     */
    public void putNettyEvent(final NettyEvent event) {
        this.nettyEventExecutor.putNettyEvent(event);
    }

    /**
     * Entry of incoming command processing.
     *
     * <p>
     * <strong>Note:</strong>
     * The incoming remoting command may be
     * <ul>
     * <li>An inquiry request from a remote peer component;</li>
     * <li>A response to a previous request issued by this very participant.</li>
     * </ul>
     * </p>
     *
     * @param ctx Channel handler context.
     * @param msg incoming remoting command.
*/
    public void processMessageReceived(ChannelHandlerContext ctx, RemotingCommand msg) {
        if (msg != null) {
            // Dispatch on command type: peers send us requests; responses match our earlier requests.
            switch (msg.getType()) {
                case REQUEST_COMMAND:
                    processRequestCommand(ctx, msg);
                    break;
                case RESPONSE_COMMAND:
                    processResponseCommand(ctx, msg);
                    break;
                default:
                    break;
            }
        }
    }

    /** Runs every registered {@link RPCHook#doBeforeRequest} for the given request. */
    protected void doBeforeRpcHooks(String addr, RemotingCommand request) {
        if (rpcHooks.size() > 0) { // NOTE(review): redundant guard — the for-loop is a no-op when empty
            for (RPCHook rpcHook : rpcHooks) {
                rpcHook.doBeforeRequest(addr, request);
            }
        }
    }

    /** Runs every registered {@link RPCHook#doAfterResponse} for the given request/response pair. */
    public void doAfterRpcHooks(String addr, RemotingCommand request, RemotingCommand response) {
        if (rpcHooks.size() > 0) {
            for (RPCHook rpcHook : rpcHooks) {
                rpcHook.doAfterResponse(addr, request, response);
            }
        }
    }

    /**
     * Writes {@code response} back on {@code channel} and records RPC latency metrics.
     * No-ops when {@code response} is null; for one-way requests only the metric is recorded.
     * The static variant takes the metrics manager explicitly (may be null → metrics skipped).
     *
     * @param callback invoked with the write future after the write completes; may be null.
     */
    public static void writeResponse(Channel channel, RemotingCommand request,
        @Nullable RemotingCommand response, Consumer<Future<?>> callback,
        RemotingMetricsManager remotingMetricsManager) {
        if (response == null) {
            return;
        }
        final AttributesBuilder attributesBuilder;
        if (remotingMetricsManager != null) {
            attributesBuilder = remotingMetricsManager.newAttributesBuilder();
            attributesBuilder.put(LABEL_IS_LONG_POLLING, request.isSuspended())
                .put(LABEL_REQUEST_CODE, RemotingHelper.getRequestCodeDesc(request.getCode()))
                .put(LABEL_RESPONSE_CODE, RemotingHelper.getResponseCodeDesc(response.getCode()));
        } else {
            attributesBuilder = null; // attributesBuilder is null iff the manager is null
        }
        if (request.isOnewayRPC()) {
            // One-way: nothing is written back; only the latency metric is recorded.
            if (attributesBuilder != null) {
                attributesBuilder.put(LABEL_RESULT, RESULT_ONEWAY);
                remotingMetricsManager.getRpcLatency().record(request.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
            }
            return;
        }
        // Echo the request's opaque id so the peer can correlate this response.
        response.setOpaque(request.getOpaque());
        response.markResponseType();
        try {
            channel.writeAndFlush(response).addListener((ChannelFutureListener) future -> {
                if (future.isSuccess()) {
                    log.debug("Response[request code: {}, response code: {}, opaque: {}] is written to channel{}",
                        request.getCode(), response.getCode(), response.getOpaque(), channel);
                } else {
                    log.error("Failed to write response[request code: {}, response code: {}, opaque: {}] to channel{}",
                        request.getCode(), response.getCode(), response.getOpaque(), channel, future.cause());
                }
                if (remotingMetricsManager != null) {
                    attributesBuilder.put(LABEL_RESULT, remotingMetricsManager.getWriteAndFlushResult(future));
                    remotingMetricsManager.getRpcLatency().record(request.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
                }
                if (callback != null) {
                    callback.accept(future);
                }
            });
        } catch (Throwable e) {
            log.error("process request over, but response failed", e);
            log.error(request.toString());
            log.error(response.toString());
            if (remotingMetricsManager != null) {
                attributesBuilder.put(LABEL_RESULT, RESULT_WRITE_CHANNEL_FAILED);
                remotingMetricsManager.getRpcLatency().record(request.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
            }
        }
    }

    /**
     * Instance variant of {@link #writeResponse(Channel, RemotingCommand, RemotingCommand, Consumer,
     * RemotingMetricsManager)} that uses this server's own metrics manager.
     */
    public void writeResponse(Channel channel, RemotingCommand request, @Nullable RemotingCommand response,
        Consumer<Future<?>> callback) {
        if (response == null) {
            return;
        }
        final AttributesBuilder attributesBuilder;
        if (this.remotingMetricsManager != null) {
            attributesBuilder = this.remotingMetricsManager.newAttributesBuilder();
            attributesBuilder.put(LABEL_IS_LONG_POLLING, request.isSuspended())
                .put(LABEL_REQUEST_CODE, RemotingHelper.getRequestCodeDesc(request.getCode()))
                .put(LABEL_RESPONSE_CODE, RemotingHelper.getResponseCodeDesc(response.getCode()));
        } else {
            attributesBuilder = null;
        }
        if (request.isOnewayRPC()) {
            if (attributesBuilder != null) {
                attributesBuilder.put(LABEL_RESULT, RESULT_ONEWAY);
                this.remotingMetricsManager.getRpcLatency().record(request.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
            }
            return;
        }
        response.setOpaque(request.getOpaque());
        response.markResponseType();
        try {
            channel.writeAndFlush(response).addListener((ChannelFutureListener) future -> {
                if (future.isSuccess()) {
                    log.debug("Response[request code: {}, response code: {}, opaque: {}] is written to channel{}",
                        request.getCode(), response.getCode(), response.getOpaque(), channel);
                } else {
                    log.error("Failed to write response[request code: {}, response code: {}, opaque: {}] to channel{}",
                        request.getCode(), response.getCode(), response.getOpaque(), channel, future.cause());
                }
                if (this.remotingMetricsManager != null && attributesBuilder != null) {
                    attributesBuilder.put(LABEL_RESULT, this.remotingMetricsManager.getWriteAndFlushResult(future));
                    this.remotingMetricsManager.getRpcLatency().record(request.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
                }
                if (callback != null) {
                    callback.accept(future);
                }
            });
        } catch (Throwable e) {
            log.error("process request over, but response failed", e);
            log.error(request.toString());
            log.error(response.toString());
            if (this.remotingMetricsManager != null && attributesBuilder != null) {
                attributesBuilder.put(LABEL_RESULT, RESULT_WRITE_CHANNEL_FAILED);
                this.remotingMetricsManager.getRpcLatency().record(request.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
            }
        }
    }

    /**
     * Process incoming request command issued by remote peer.
     * Looks up the processor by request code (falling back to the default processor), applies
     * shutdown/flow-control gating, then submits the work to the processor's executor.
     *
     * @param ctx channel handler context.
     * @param cmd request command.
     */
    public void processRequestCommand(final ChannelHandlerContext ctx, final RemotingCommand cmd) {
        final Pair<NettyRequestProcessor, ExecutorService> matched = this.processorTable.get(cmd.getCode());
        final Pair<NettyRequestProcessor, ExecutorService> pair =
            null == matched ? this.defaultRequestProcessorPair : matched;
        final int opaque = cmd.getOpaque();

        if (pair == null) {
            // No processor registered for this code and no default: reject the request.
            String error = " request type " + cmd.getCode() + " not supported";
            final RemotingCommand response =
                RemotingCommand.createResponseCommand(RemotingSysResponseCode.REQUEST_CODE_NOT_SUPPORTED, error);
            response.setOpaque(opaque);
            this.writeResponse(ctx.channel(), cmd, response, null);
            log.error(RemotingHelper.parseChannelRemoteAddr(ctx.channel()) + error);
            return;
        }

        Runnable run = buildProcessRequestHandler(ctx, cmd, pair, opaque);

        if (isShuttingDown.get()) {
            // Only clients newer than V5_3_1 understand GO_AWAY; older clients fall through and
            // are still processed.
            if (cmd.getVersion() > MQVersion.Version.V5_3_1.ordinal()) {
                final RemotingCommand response =
                    RemotingCommand.createResponseCommand(ResponseCode.GO_AWAY, "please go away");
                response.setOpaque(opaque);
                this.writeResponse(ctx.channel(), cmd, response, null);
                log.info("proxy is shutting down, write response GO_AWAY. channel={}, requestCode={}, opaque={}",
                    ctx.channel(), cmd.getCode(), opaque);
                return;
            }
        }

        if (pair.getObject1().rejectRequest()) {
            // Processor-level flow control.
            final RemotingCommand response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_BUSY,
                "[REJECTREQUEST]system busy, start flow control for a while");
            response.setOpaque(opaque);
            this.writeResponse(ctx.channel(), cmd, response, null);
            return;
        }

        try {
            final RequestTask requestTask = new RequestTask(run, ctx.channel(), cmd);
            //async execute task, current thread return directly
            pair.getObject2().submit(requestTask);
        } catch (RejectedExecutionException e) {
            // Executor saturated; log at most roughly once per 10s window to avoid log storms.
            if ((System.currentTimeMillis() % 10000) == 0) {
                log.warn(RemotingHelper.parseChannelRemoteAddr(ctx.channel())
                    + ", too many requests and system thread pool busy, RejectedExecutionException "
                    + pair.getObject2().toString()
                    + " request code: " + cmd.getCode());
            }

            final RemotingCommand response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_BUSY,
                "[OVERLOAD]system busy, start flow control for a while");
            response.setOpaque(opaque);
            this.writeResponse(ctx.channel(), cmd, response, null);
        } catch (Throwable e) {
            // NOTE(review): other submit failures are only counted in metrics — no log, no response
            // is written to the peer; confirm this silence is intentional.
            if (remotingMetricsManager != null) {
                AttributesBuilder attributesBuilder = remotingMetricsManager.newAttributesBuilder()
                    .put(LABEL_REQUEST_CODE, RemotingHelper.getRequestCodeDesc(cmd.getCode()))
                    .put(LABEL_RESULT, RESULT_PROCESS_REQUEST_FAILED);
                remotingMetricsManager.getRpcLatency().record(cmd.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
            }
        }
    }

    /**
     * Builds the Runnable that actually processes one request on the processor's executor:
     * before-hooks → optional request pipeline → processor → after-hooks → write response.
     * A hook exception is deferred (the processor still runs / SYSTEM_ERROR is produced) and
     * rethrown afterwards; {@link AbortProcessException} short-circuits with its own code/message.
     */
    private Runnable buildProcessRequestHandler(ChannelHandlerContext ctx, RemotingCommand cmd,
        Pair<NettyRequestProcessor, ExecutorService> pair, int opaque) {
        return () -> {
            Exception exception = null;
            RemotingCommand response;
            String remoteAddr = null;
            try {
                remoteAddr = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
                try {
                    doBeforeRpcHooks(remoteAddr, cmd);
                } catch (AbortProcessException e) {
                    throw e;
                } catch (Exception e) {
                    exception = e; // deferred; see rethrow below
                }

                if (this.requestPipeline != null) {
                    this.requestPipeline.execute(ctx, cmd);
                }

                if (exception == null) {
                    response = pair.getObject1().processRequest(ctx, cmd);
                } else {
                    response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_ERROR, null);
                }

                try {
                    doAfterRpcHooks(remoteAddr, cmd, response);
                } catch (AbortProcessException e) {
                    throw e;
                } catch (Exception e) {
                    exception = e;
                }

                if (exception != null) {
                    throw exception;
                }

                this.writeResponse(ctx.channel(), cmd, response, null);
            } catch (AbortProcessException e) {
                // Abort carries its own response code and message for the peer.
                response = RemotingCommand.createResponseCommand(e.getResponseCode(), e.getErrorMessage());
                response.setOpaque(opaque);
                this.writeResponse(ctx.channel(), cmd, response, null);
            } catch (Throwable e) {
                log.error("process request exception, remoteAddr: {}", remoteAddr, e);
                log.error(cmd.toString());

                if (!cmd.isOnewayRPC()) {
                    response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_ERROR,
                        UtilAll.exceptionSimpleDesc(e));
                    response.setOpaque(opaque);
                    this.writeResponse(ctx.channel(), cmd, response, null);
                }
            }
        };
    }

    /**
     * Process response from remote peer to the previous issued requests.
     *
     * @param ctx channel handler context.
     * @param cmd response command instance.
     */
    public void processResponseCommand(ChannelHandlerContext ctx, RemotingCommand cmd) {
        final int opaque = cmd.getOpaque();
        // Correlate the response with its pending request via the opaque id.
        final ResponseFuture responseFuture = responseTable.get(opaque);
        if (responseFuture != null) {
            responseFuture.setResponseCommand(cmd);

            responseTable.remove(opaque);

            if (responseFuture.getInvokeCallback() != null) {
                executeInvokeCallback(responseFuture);
            } else {
                // Synchronous waiter: deliver the response and release the semaphore permit.
                responseFuture.putResponse(cmd);
                responseFuture.release();
            }
        } else {
            // Late/duplicate response — the request already timed out or was never issued here.
            log.warn("receive response, cmd={}, but not matched any request, address={}, channelId={}", cmd,
                RemotingHelper.parseChannelRemoteAddr(ctx.channel()), ctx.channel().id());
        }
    }

    /**
     * Execute callback in callback executor. If callback executor is null, run directly in current thread.
     * The future's semaphore permit is released in a finally block on whichever thread runs it.
     */
    private void executeInvokeCallback(final ResponseFuture responseFuture) {
        boolean runInThisThread = false;
        ExecutorService executor = this.getCallbackExecutor();
        if (executor != null && !executor.isShutdown()) {
            try {
                executor.submit(() -> {
                    try {
                        responseFuture.executeInvokeCallback();
                    } catch (Throwable e) {
                        log.warn("execute callback in executor exception, and callback throw", e);
                    } finally {
                        responseFuture.release();
                    }
                });
            } catch (Exception e) {
                // Executor rejected the task (e.g. saturated) — fall back to the current thread.
                runInThisThread = true;
                log.warn("execute callback in executor exception, maybe executor busy", e);
            }
        } else {
            runInThisThread = true;
        }

        if (runInThisThread) {
            try {
                responseFuture.executeInvokeCallback();
            } catch (Throwable e) {
                log.warn("executeInvokeCallback Exception", e);
            } finally {
                responseFuture.release();
            }
        }
    }

    /**
     * Custom RPC hooks.
     *
     * @return the list of registered RPC hooks (possibly empty).
*/
    public List<RPCHook> getRPCHook() {
        return rpcHooks;
    }

    /** Registers an RPC hook; null and duplicate hooks are silently ignored. */
    public void registerRPCHook(RPCHook rpcHook) {
        if (rpcHook != null && !rpcHooks.contains(rpcHook)) {
            rpcHooks.add(rpcHook);
        }
    }

    public void setRequestPipeline(RequestPipeline pipeline) {
        this.requestPipeline = pipeline;
    }

    public void clearRPCHook() {
        rpcHooks.clear();
    }

    /**
     * This method specifies thread pool to use while invoking callback methods.
     *
     * @return Dedicated thread pool instance if specified; or null if the callback is supposed to be executed in the
     * netty event-loop thread.
     */
    public abstract ExecutorService getCallbackExecutor();

    /**
     * <p>
     * This method is periodically invoked to scan and expire deprecated request.
     * </p>
     * Entries older than timeout + 1s grace are released, removed from {@link #responseTable},
     * and their callbacks fired afterwards (outside the iteration).
     */
    public void scanResponseTable() {
        final List<ResponseFuture> rfList = new LinkedList<>();
        Iterator<Entry<Integer, ResponseFuture>> it = this.responseTable.entrySet().iterator();
        while (it.hasNext()) {
            Entry<Integer, ResponseFuture> next = it.next();
            ResponseFuture rep = next.getValue();

            if ((rep.getBeginTimestamp() + rep.getTimeoutMillis() + 1000) <= System.currentTimeMillis()) {
                rep.release(); // release the semaphore permit before removal
                it.remove();
                rfList.add(rep);
                log.warn("remove timeout request, " + rep);
            }
        }

        for (ResponseFuture rf : rfList) {
            try {
                executeInvokeCallback(rf);
            } catch (Throwable e) {
                log.warn("scanResponseTable, operationComplete Exception", e);
            }
        }
    }

    /**
     * Synchronous invoke: delegates to {@link #invokeImpl} and blocks up to {@code timeoutMillis}
     * for the response command.
     *
     * @throws RemotingSendRequestException if the underlying send failed
     * @throws RemotingTimeoutException if no response arrived within the timeout
     */
    public RemotingCommand invokeSyncImpl(final Channel channel, final RemotingCommand request,
        final long timeoutMillis)
        throws InterruptedException, RemotingSendRequestException, RemotingTimeoutException {
        try {
            return invokeImpl(channel, request, timeoutMillis).thenApply(ResponseFuture::getResponseCommand)
                .get(timeoutMillis, TimeUnit.MILLISECONDS);
        } catch (ExecutionException e) {
            throw new RemotingSendRequestException(channel.remoteAddress().toString(), e.getCause());
        } catch (TimeoutException e) {
            throw new RemotingTimeoutException(channel.remoteAddress().toString(), timeoutMillis, e.getCause());
        }
    }

    public CompletableFuture<ResponseFuture> invokeImpl(final Channel channel, final RemotingCommand request,
        final long timeoutMillis) {
        return invoke0(channel, request, timeoutMillis);
    }

    /**
     * Core async invoke. Acquires a permit from {@link #semaphoreAsync} (waiting up to
     * {@code timeoutMillis}), registers a {@link ResponseFuture} in {@link #responseTable}, then
     * writes the request; the returned future completes when the response arrives or fails.
     * The permit is released exactly once via {@link SemaphoreReleaseOnlyOnce}, and the time spent
     * acquiring it is deducted from the remaining timeout.
     */
    protected CompletableFuture<ResponseFuture> invoke0(final Channel channel, final RemotingCommand request,
        final long timeoutMillis) {
        CompletableFuture<ResponseFuture> future = new CompletableFuture<>();
        long beginStartTime = System.currentTimeMillis();
        final int opaque = request.getOpaque();

        boolean acquired;
        try {
            acquired = this.semaphoreAsync.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS);
        } catch (Throwable t) {
            future.completeExceptionally(t);
            return future;
        }
        if (acquired) {
            final SemaphoreReleaseOnlyOnce once = new SemaphoreReleaseOnlyOnce(this.semaphoreAsync);
            long costTime = System.currentTimeMillis() - beginStartTime;
            if (timeoutMillis < costTime) {
                // The semaphore wait alone consumed the whole timeout budget.
                once.release();
                future.completeExceptionally(new RemotingTimeoutException("invokeAsyncImpl call timeout"));
                return future;
            }

            // The callback needs the ResponseFuture itself, which doesn't exist yet when the
            // callback is constructed — hence the AtomicReference indirection.
            AtomicReference<ResponseFuture> responseFutureReference = new AtomicReference<>();
            final ResponseFuture responseFuture = new ResponseFuture(channel, opaque, request,
                timeoutMillis - costTime,
                new InvokeCallback() {
                    @Override
                    public void operationComplete(ResponseFuture responseFuture) {

                    }

                    @Override
                    public void operationSucceed(RemotingCommand response) {
                        future.complete(responseFutureReference.get());
                    }

                    @Override
                    public void operationFail(Throwable throwable) {
                        future.completeExceptionally(throwable);
                    }
                }, once);
            responseFutureReference.set(responseFuture);
            this.responseTable.put(opaque, responseFuture);
            try {
                channel.writeAndFlush(request).addListener((ChannelFutureListener) f -> {
                    if (f.isSuccess()) {
                        responseFuture.setSendRequestOK(true);
                        return;
                    }
                    requestFail(opaque);
                    log.warn("send a request command to channel <{}>, channelId={}, failed.",
                        RemotingHelper.parseChannelRemoteAddr(channel), channel.id());
                });
                return future;
            } catch (Exception e) {
                // Synchronous write failure: undo the table entry and release the permit.
                responseTable.remove(opaque);
                responseFuture.release();
                log.warn("send a request command to channel <{}> channelId={} Exception",
                    RemotingHelper.parseChannelRemoteAddr(channel), channel.id(), e);
                future.completeExceptionally(
                    new RemotingSendRequestException(RemotingHelper.parseChannelRemoteAddr(channel), e));
                return future;
            }
        } else {
            if (timeoutMillis <= 0) {
                future.completeExceptionally(new RemotingTooMuchRequestException("invokeAsyncImpl invoke too fast"));
            } else {
                String info = String.format(
                    "invokeAsyncImpl tryAcquire semaphore timeout, %dms, waiting thread nums: %d semaphoreAsyncValue: %d",
                    timeoutMillis,
                    this.semaphoreAsync.getQueueLength(),
                    this.semaphoreAsync.availablePermits()
                );
                log.warn(info);
                future.completeExceptionally(new RemotingTimeoutException(info));
            }
            return future;
        }
    }

    /**
     * Async invoke adapter: bridges the CompletableFuture-based {@link #invokeImpl} onto the
     * {@link InvokeCallback} interface. operationComplete fires in both outcomes (with a
     * cause-carrying ResponseFuture on failure), then operationSucceed or operationFail.
     */
    public void invokeAsyncImpl(final Channel channel, final RemotingCommand request, final long timeoutMillis,
        final InvokeCallback invokeCallback) {
        invokeImpl(channel, request, timeoutMillis)
            .whenComplete((v, t) -> {
                if (t == null) {
                    invokeCallback.operationComplete(v);
                } else {
                    ResponseFuture responseFuture =
                        new ResponseFuture(channel, request.getOpaque(), request, timeoutMillis, null, null);
                    responseFuture.setCause(t);
                    invokeCallback.operationComplete(responseFuture);
                }
            })
            .thenAccept(responseFuture -> invokeCallback.operationSucceed(responseFuture.getResponseCommand()))
            .exceptionally(t -> {
                invokeCallback.operationFail(ExceptionUtils.getRealException(t));
                return null;
            });
    }

    /** Fails the pending request with the given opaque id: marks send-failed, fires its callback, releases it. */
    private void requestFail(final int opaque) {
        ResponseFuture responseFuture = responseTable.remove(opaque);
        if (responseFuture != null) {
            responseFuture.setSendRequestOK(false);
            responseFuture.putResponse(null);
            try {
                executeInvokeCallback(responseFuture);
            } catch (Throwable e) {
                log.warn("execute callback in requestFail, and callback throw", e);
            } finally {
                responseFuture.release();
            }
        }
    }

    /**
     * Mark every pending request of the specified channel as failed and invoke its failure
     * callback immediately.
     *
     * @param channel the channel which is already closed
     */
    protected void failFast(final Channel channel) {
        for (Entry<Integer, ResponseFuture> entry : responseTable.entrySet()) {
            if (entry.getValue().getChannel() == channel) {
                Integer opaque = entry.getKey();
                if (opaque != null) {
                    requestFail(opaque);
                }
            }
        }
    }

    /**
     * One-way invoke: marks the request oneway, gates it on {@link #semaphoreOneway}, and writes it
     * without registering a ResponseFuture. The permit is released in the write-completion listener.
     */
    public void invokeOnewayImpl(final Channel channel, final RemotingCommand request, final long timeoutMillis)
        throws InterruptedException, RemotingTooMuchRequestException, RemotingTimeoutException,
        RemotingSendRequestException {
        request.markOnewayRPC();
        boolean acquired = this.semaphoreOneway.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS);
        if (acquired) {
            final SemaphoreReleaseOnlyOnce once = new SemaphoreReleaseOnlyOnce(this.semaphoreOneway);
            try {
                channel.writeAndFlush(request).addListener((ChannelFutureListener) f -> {
                    once.release();
                    if (!f.isSuccess()) {
                        log.warn("send a request command to channel <" + channel.remoteAddress() + "> failed.");
                    }
                });
            } catch (Exception e) {
                once.release();
                log.warn("write send a request command to channel <" + channel.remoteAddress() + "> failed.");
                throw new RemotingSendRequestException(RemotingHelper.parseChannelRemoteAddr(channel), e);
            }
        } else {
            if (timeoutMillis <= 0) {
                throw new RemotingTooMuchRequestException("invokeOnewayImpl invoke too fast");
            } else {
                String info = String.format(
                    "invokeOnewayImpl tryAcquire semaphore timeout, %dms, waiting thread nums: %d semaphoreOnewayValue: %d",
                    timeoutMillis,
                    this.semaphoreOneway.getQueueLength(),
                    this.semaphoreOneway.availablePermits()
                );
                log.warn(info);
                throw new RemotingTimeoutException(info);
            }
        }
    }

    public HashMap<Integer, Pair<NettyRequestProcessor, ExecutorService>> getProcessorTable() {
        return processorTable;
    }

    /**
     * Service thread that drains queued {@link NettyEvent}s to the user-defined
     * {@link ChannelEventListener}. The queue is soft-capped at 10000 events.
     * NOTE: this class continues past the end of this chunk; the final log statement below is
     * truncated by the chunk boundary.
     */
    class NettyEventExecutor extends ServiceThread {
        private final LinkedBlockingQueue<NettyEvent> eventQueue = new LinkedBlockingQueue<>();

        public void putNettyEvent(final NettyEvent event) {
            int currentSize = this.eventQueue.size();
            int maxSize = 10000;
            if (currentSize <= maxSize) {
                this.eventQueue.add(event);
            } else {
                log.warn("event queue size [{}] over the limit [{}], so
drop this event {}", currentSize, maxSize, event.toString()); } } @Override public void run() { log.info(this.getServiceName() + " service started"); final ChannelEventListener listener = NettyRemotingAbstract.this.getChannelEventListener(); while (!this.isStopped()) { try { NettyEvent event = this.eventQueue.poll(3000, TimeUnit.MILLISECONDS); if (event != null && listener != null) { switch (event.getType()) { case IDLE: listener.onChannelIdle(event.getRemoteAddr(), event.getChannel()); break; case CLOSE: listener.onChannelClose(event.getRemoteAddr(), event.getChannel()); break; case CONNECT: listener.onChannelConnect(event.getRemoteAddr(), event.getChannel()); break; case EXCEPTION: listener.onChannelException(event.getRemoteAddr(), event.getChannel()); break; case ACTIVE: listener.onChannelActive(event.getRemoteAddr(), event.getChannel()); break; default: break; } } } catch (Exception e) { log.warn(this.getServiceName() + " service has exception. ", e); } } log.info(this.getServiceName() + " service end"); } @Override public String getServiceName() { return NettyEventExecutor.class.getSimpleName(); } } }
googleapis/google-cloud-java
35,184
java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/CreateSessionEntityTypeRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3/session_entity_type.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3; /** * * * <pre> * The request message for * [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.CreateSessionEntityType]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest} */ public final class CreateSessionEntityTypeRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest) CreateSessionEntityTypeRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateSessionEntityTypeRequest.newBuilder() to construct. 
private CreateSessionEntityTypeRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateSessionEntityTypeRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateSessionEntityTypeRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_CreateSessionEntityTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_CreateSessionEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest.class, com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The session to create a session entity type for. * Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/sessions/&lt;SessionID&gt;` * or * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/sessions/&lt;SessionID&gt;`. * If `Environment ID` is not specified, we assume default 'draft' * environment. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The session to create a session entity type for. * Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/sessions/&lt;SessionID&gt;` * or * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/sessions/&lt;SessionID&gt;`. * If `Environment ID` is not specified, we assume default 'draft' * environment. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SESSION_ENTITY_TYPE_FIELD_NUMBER = 2; private com.google.cloud.dialogflow.cx.v3.SessionEntityType sessionEntityType_; /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the sessionEntityType field is set. */ @java.lang.Override public boolean hasSessionEntityType() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The session entity type to create. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The sessionEntityType. */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3.SessionEntityType getSessionEntityType() { return sessionEntityType_ == null ? com.google.cloud.dialogflow.cx.v3.SessionEntityType.getDefaultInstance() : sessionEntityType_; } /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3.SessionEntityTypeOrBuilder getSessionEntityTypeOrBuilder() { return sessionEntityType_ == null ? com.google.cloud.dialogflow.cx.v3.SessionEntityType.getDefaultInstance() : sessionEntityType_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getSessionEntityType()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getSessionEntityType()); } size += getUnknownFields().getSerializedSize(); 
memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest other = (com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasSessionEntityType() != other.hasSessionEntityType()) return false; if (hasSessionEntityType()) { if (!getSessionEntityType().equals(other.getSessionEntityType())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasSessionEntityType()) { hash = (37 * hash) + SESSION_ENTITY_TYPE_FIELD_NUMBER; hash = (53 * hash) + getSessionEntityType().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.CreateSessionEntityType]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest) com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_CreateSessionEntityTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_CreateSessionEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest.class, com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSessionEntityTypeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; sessionEntityType_ = null; if (sessionEntityTypeBuilder_ != null) { sessionEntityTypeBuilder_.dispose(); sessionEntityTypeBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3.SessionEntityTypeProto 
.internal_static_google_cloud_dialogflow_cx_v3_CreateSessionEntityTypeRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest build() { com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest buildPartial() { com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest result = new com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.sessionEntityType_ = sessionEntityTypeBuilder_ == null ? 
sessionEntityType_ : sessionEntityTypeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest) { return mergeFrom((com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest other) { if (other == com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasSessionEntityType()) { mergeSessionEntityType(other.getSessionEntityType()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getSessionEntityTypeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The session to create a session entity type for. * Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/sessions/&lt;SessionID&gt;` * or * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/sessions/&lt;SessionID&gt;`. * If `Environment ID` is not specified, we assume default 'draft' * environment. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The session to create a session entity type for. 
* Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/sessions/&lt;SessionID&gt;` * or * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/sessions/&lt;SessionID&gt;`. * If `Environment ID` is not specified, we assume default 'draft' * environment. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The session to create a session entity type for. * Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/sessions/&lt;SessionID&gt;` * or * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/sessions/&lt;SessionID&gt;`. * If `Environment ID` is not specified, we assume default 'draft' * environment. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The session to create a session entity type for. * Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/sessions/&lt;SessionID&gt;` * or * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/sessions/&lt;SessionID&gt;`. 
* If `Environment ID` is not specified, we assume default 'draft' * environment. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The session to create a session entity type for. * Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/sessions/&lt;SessionID&gt;` * or * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/sessions/&lt;SessionID&gt;`. * If `Environment ID` is not specified, we assume default 'draft' * environment. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.dialogflow.cx.v3.SessionEntityType sessionEntityType_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.SessionEntityType, com.google.cloud.dialogflow.cx.v3.SessionEntityType.Builder, com.google.cloud.dialogflow.cx.v3.SessionEntityTypeOrBuilder> sessionEntityTypeBuilder_; /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the sessionEntityType field is set. */ public boolean hasSessionEntityType() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. 
The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The sessionEntityType. */ public com.google.cloud.dialogflow.cx.v3.SessionEntityType getSessionEntityType() { if (sessionEntityTypeBuilder_ == null) { return sessionEntityType_ == null ? com.google.cloud.dialogflow.cx.v3.SessionEntityType.getDefaultInstance() : sessionEntityType_; } else { return sessionEntityTypeBuilder_.getMessage(); } } /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSessionEntityType(com.google.cloud.dialogflow.cx.v3.SessionEntityType value) { if (sessionEntityTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } sessionEntityType_ = value; } else { sessionEntityTypeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSessionEntityType( com.google.cloud.dialogflow.cx.v3.SessionEntityType.Builder builderForValue) { if (sessionEntityTypeBuilder_ == null) { sessionEntityType_ = builderForValue.build(); } else { sessionEntityTypeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The session entity type to create. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeSessionEntityType( com.google.cloud.dialogflow.cx.v3.SessionEntityType value) { if (sessionEntityTypeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && sessionEntityType_ != null && sessionEntityType_ != com.google.cloud.dialogflow.cx.v3.SessionEntityType.getDefaultInstance()) { getSessionEntityTypeBuilder().mergeFrom(value); } else { sessionEntityType_ = value; } } else { sessionEntityTypeBuilder_.mergeFrom(value); } if (sessionEntityType_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearSessionEntityType() { bitField0_ = (bitField0_ & ~0x00000002); sessionEntityType_ = null; if (sessionEntityTypeBuilder_ != null) { sessionEntityTypeBuilder_.dispose(); sessionEntityTypeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3.SessionEntityType.Builder getSessionEntityTypeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSessionEntityTypeFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The session entity type to create. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3.SessionEntityTypeOrBuilder getSessionEntityTypeOrBuilder() { if (sessionEntityTypeBuilder_ != null) { return sessionEntityTypeBuilder_.getMessageOrBuilder(); } else { return sessionEntityType_ == null ? com.google.cloud.dialogflow.cx.v3.SessionEntityType.getDefaultInstance() : sessionEntityType_; } } /** * * * <pre> * Required. The session entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.SessionEntityType session_entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.SessionEntityType, com.google.cloud.dialogflow.cx.v3.SessionEntityType.Builder, com.google.cloud.dialogflow.cx.v3.SessionEntityTypeOrBuilder> getSessionEntityTypeFieldBuilder() { if (sessionEntityTypeBuilder_ == null) { sessionEntityTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.SessionEntityType, com.google.cloud.dialogflow.cx.v3.SessionEntityType.Builder, com.google.cloud.dialogflow.cx.v3.SessionEntityTypeOrBuilder>( getSessionEntityType(), getParentForChildren(), isClean()); sessionEntityType_ = null; } return sessionEntityTypeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest) private static final 
com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest(); } public static com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateSessionEntityTypeRequest> PARSER = new com.google.protobuf.AbstractParser<CreateSessionEntityTypeRequest>() { @java.lang.Override public CreateSessionEntityTypeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateSessionEntityTypeRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateSessionEntityTypeRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateSessionEntityTypeRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/solr
34,715
solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.handler.component; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.FacetParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.junit.BeforeClass; /** Single node testing of pivot facets */ public class FacetPivotSmallTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); initCore("solrconfig.xml", "schema11.xml"); } @Override public void setUp() throws Exception { super.setUp(); clearIndex(); assertU(commit()); } /** we don't support comma's in the "stats" local param ... 
yet: SOLR-6663 */ public void testStatsTagHasComma() { if (random().nextBoolean()) { // behavior should be same either way index(); } assertQEx( "Can't use multiple tags in stats local param until SOLR-6663 is decided", req( "q", "*:*", "facet", "true", "stats", "true", "stats.field", "{!tag=foo}price_ti", "stats.field", "{!tag=bar}id", "facet.pivot", "{!stats=foo,bar}place_t,company_t"), 400); } /** if bogus stats are requested, the pivots should still work */ public void testBogusStatsTag() { index(); assertQ( // check we still get pivots... req("q", "*:*", "facet", "true", "facet.pivot", "{!stats=bogus}place_t,company_t"), "//arr[@name='place_t,company_t']/lst[str[@name='value'][.='dublin']]", // .. but sanity check we don't have any stats "count(//arr[@name='place_t,company_t']/lst[str[@name='value'][.='dublin']]/lst[@name='stats'])=0"); } public void testPivotFacetUnsorted() { index(); final ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("facet", "true"); params.add("facet.pivot", "place_t,company_t"); SolrQueryRequest req = req(params); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; assertQ( req, facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", facetPivotPrefix + 
"[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // london facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=2]", // cardiff facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", facetPivotPrefix + 
"[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // krakow facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=1]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // la facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=2]", facetPivotPrefix + 
"[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // cork facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='rte']", facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=1]"); } public void testPivotFacetStatsUnsortedTagged() { index(); final ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("facet", "true"); params.add("facet.pivot", "{!stats=s1}place_t,company_t"); params.add("stats", "true"); params.add("stats.field", "{!key=avg_price tag=s1}price_ti"); SolrQueryRequest req = req(params); final String statsPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; String dublinMicrosoftStats = statsPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[str[@name='value'][.='microsoft']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; String cardiffPolecatStats = statsPrefix + 
"[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[str[@name='value'][.='polecat']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; String krakowFujitsuStats = statsPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[str[@name='value'][.='fujitsu']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; assertQ( req, dublinMicrosoftStats + "/double[@name='min'][.=15.0]", dublinMicrosoftStats + "/double[@name='max'][.=29.0]", dublinMicrosoftStats + "/long[@name='count'][.=3]", dublinMicrosoftStats + "/long[@name='missing'][.=1]", dublinMicrosoftStats + "/double[@name='sum'][.=63.0]", dublinMicrosoftStats + "/double[@name='sumOfSquares'][.=1427.0]", dublinMicrosoftStats + "/double[@name='mean'][.=21.0]", dublinMicrosoftStats + "/double[@name='stddev'][.=7.211102550927978]", // if new stats are supported, this will break - update test to assert values for each "count(" + dublinMicrosoftStats + "/*)=8", cardiffPolecatStats + "/double[@name='min'][.=15.0]", cardiffPolecatStats + "/double[@name='max'][.=39.0]", cardiffPolecatStats + "/long[@name='count'][.=2]", cardiffPolecatStats + "/long[@name='missing'][.=1]", cardiffPolecatStats + "/double[@name='sum'][.=54.0]", cardiffPolecatStats + "/double[@name='sumOfSquares'][.=1746.0]", cardiffPolecatStats + "/double[@name='mean'][.=27.0]", cardiffPolecatStats + "/double[@name='stddev'][.=16.97056274847714]", // if new stats are supported, this will break - update test to assert values for each "count(" + cardiffPolecatStats + "/*)=8", krakowFujitsuStats + "/null[@name='min']", krakowFujitsuStats + "/null[@name='max']", krakowFujitsuStats + "/long[@name='count'][.=0]", krakowFujitsuStats + "/long[@name='missing'][.=1]", krakowFujitsuStats + "/double[@name='sum'][.=0.0]", krakowFujitsuStats + "/double[@name='sumOfSquares'][.=0.0]", krakowFujitsuStats + "/double[@name='mean'][.='NaN']", krakowFujitsuStats + "/double[@name='stddev'][.=0.0]", // if new stats are supported, 
this will break - update test to assert values for each "count(" + krakowFujitsuStats + "/*)=8"); } public void testPivotFacetSortedCount() { index(); final ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("facet", "true"); params.add("facet.pivot", "place_t,company_t"); // Test sorting by count // TODO clarify why facet count active by default // The default is count if facet.limit is greater than 0, index otherwise, but facet.limit was // not defined params.set(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(params); assertQ( req, facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // london facetPivotPrefix + 
"[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=2]", // cardiff facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + 
"[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // krakow facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=1]", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // la facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", facetPivotPrefix + 
"[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // cork facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='rte']", facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=1]"); } public void testPivotFacetLimit() { index(); final ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("facet", "true"); params.add("facet.pivot", "place_t,company_t"); params.set(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT); params.set(FacetParams.FACET_LIMIT, 2); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(params); assertQ( req, facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", // london facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", facetPivotPrefix + 
"[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]"); } public void testPivotIndividualFacetLimit() { index(); final ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("facet", "true"); params.add("facet.pivot", "place_t,company_t"); params.set(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT); params.set("f.place_t." + FacetParams.FACET_LIMIT, 1); params.set("f.company_t." + FacetParams.FACET_LIMIT, 4); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(params); assertQ( req, facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]"); } public void testPivotFacetMissing() { // Test facet.missing=true with diff sorts index(); indexMissing(); SolrParams missingA = params( "q", "*:*", "rows", "0", "facet", "true", 
"facet.pivot", "place_t,company_t", // default facet.sort FacetParams.FACET_MISSING, "true"); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(missingA); assertQ( req, // not enough values for pivot facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not the missing place value facetPivotPrefix + "[7]/null[@name='value'][.='']", // wrong missing place count facetPivotPrefix + "[7]/int[@name='count'][.=2]", // not enough sub-pivots for missing place facetPivotPrefix + "[7]/arr[@name='pivot'][count(.) > 0]", // not the missing company value facetPivotPrefix + "[7]/arr[@name='pivot']/lst[6]/null[@name='value'][.='']", // wrong missing company count facetPivotPrefix + "[7]/arr[@name='pivot']/lst[6]/int[@name='count'][.=1]", // company shouldn't have sub-pivots facetPivotPrefix + "[7]/arr[@name='pivot']/lst[6][not(arr[@name='pivot'])]"); SolrParams missingB = SolrParams.wrapDefaults( missingA, params(FacetParams.FACET_LIMIT, "4", "facet.sort", "index")); req = req(missingB); assertQ( req, // not enough values for pivot facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not the missing place value facetPivotPrefix + "[5]/null[@name='value'][.='']", // wrong missing place count facetPivotPrefix + "[5]/int[@name='count'][.=2]", // not enough sub-pivots for missing place facetPivotPrefix + "[5]/arr[@name='pivot'][count(.) 
> 0]", // not the missing company value facetPivotPrefix + "[5]/arr[@name='pivot']/lst[5]/null[@name='value'][.='']", // wrong missing company count facetPivotPrefix + "[5]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // company shouldn't have sub-pivots facetPivotPrefix + "[5]/arr[@name='pivot']/lst[5][not(arr[@name='pivot'])]"); SolrParams missingC = SolrParams.wrapDefaults( missingA, params(FacetParams.FACET_LIMIT, "0", "facet.sort", "index")); assertQ( req(missingC), // not enough values for pivot facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not the missing place value facetPivotPrefix + "[1]/null[@name='value'][.='']", // wrong missing place count facetPivotPrefix + "[1]/int[@name='count'][.=2]", // not enough sub-pivots for missing place facetPivotPrefix + "[1]/arr[@name='pivot'][count(.) > 0]", // not the missing company value facetPivotPrefix + "[1]/arr[@name='pivot']/lst[1]/null[@name='value'][.='']", // wrong missing company count facetPivotPrefix + "[1]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", // company shouldn't have sub-pivots facetPivotPrefix + "[1]/arr[@name='pivot']/lst[1][not(arr[@name='pivot'])]"); } public void testPivotFacetIndexSortMincountAndLimit() { // sort=index + mincount + limit index(); indexMissing(); for (SolrParams variableParams : new SolrParams[] { // we should get the same results regardless of overrequest params(), params() }) { SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", "rows", "0", "facet", "true", "facet.pivot", "company_t", "facet.sort", "index", "facet.pivot.mincount", "4", "facet.limit", "4"), variableParams); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; SolrQueryRequest req = req(p); assertQ( req, facetPivotPrefix + "[count(./lst) = 4]", // not enough values for pivot facetPivotPrefix + "/lst[1]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "/lst[1]/int[@name='count'][.=4]", facetPivotPrefix + 
"/lst[2]/str[@name='value'][.='microsoft']", facetPivotPrefix + "/lst[2]/int[@name='count'][.=5]", facetPivotPrefix + "/lst[3]/str[@name='value'][.='null']", facetPivotPrefix + "/lst[3]/int[@name='count'][.=6]", facetPivotPrefix + "/lst[4]/str[@name='value'][.='polecat']", facetPivotPrefix + "/lst[4]/int[@name='count'][.=6]"); } } public void testPivotFacetIndexSortMincountLimitAndOffset() { // sort=index + mincount + limit + offset index(); indexMissing(); for (SolrParams variableParams : new SolrParams[] { // we should get the same results regardless of overrequest params(), params() }) { SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", "rows", "0", "facet", "true", "facet.pivot", "company_t", "facet.sort", "index", "facet.pivot.mincount", "4", "facet.offset", "1", "facet.limit", "4"), variableParams); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; SolrQueryRequest req = req(p); assertQ( req, facetPivotPrefix + "[count(./lst) = 3]", // asked for 4, but not enough meet the mincount facetPivotPrefix + "/lst[1]/str[@name='value'][.='microsoft']", facetPivotPrefix + "/lst[1]/int[@name='count'][.=5]", facetPivotPrefix + "/lst[2]/str[@name='value'][.='null']", facetPivotPrefix + "/lst[2]/int[@name='count'][.=6]", facetPivotPrefix + "/lst[3]/str[@name='value'][.='polecat']", facetPivotPrefix + "/lst[3]/int[@name='count'][.=6]"); } } public void testPivotFacetIndexSortMincountLimitAndOffsetPermutations() { // sort=index + mincount + limit + offset (more permutations) index(); indexMissing(); for (SolrParams variableParams : new SolrParams[] { // all of these combinations should result in the same first value params("facet.pivot.mincount", "4", "facet.offset", "2"), params("facet.pivot.mincount", "5", "facet.offset", "1"), params("facet.pivot.mincount", "6", "facet.offset", "0") }) { SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", "rows", "0", "facet", "true", "facet.limit", "1", 
"facet.sort", "index", "facet.overrequest.ratio", "0", "facet.pivot", "company_t"), variableParams); final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; SolrQueryRequest req = req(p); assertQ( req, facetPivotPrefix + "[count(./lst) = 1]", // asked for 4, but not enough meet the mincount facetPivotPrefix + "/lst[1]/str[@name='value'][.='null']", facetPivotPrefix + "/lst[1]/int[@name='count'][.=6]"); } } private void indexMissing() { String[] missingDoc = {"id", "777"}; assertU(adoc(missingDoc)); assertU(commit()); } private void index() { // NOTE: we use the literal (4 character) string "null" as a company name // to help ensure there isn't any bugs where the literal string is treated as if it // were a true NULL value. String[] doc = { "id", "19", "place_t", "cardiff dublin", "company_t", "microsoft polecat", "price_ti", "15" }; assertU(adoc(doc)); String[] doc1 = { "id", "20", "place_t", "dublin", "company_t", "polecat microsoft null", "price_ti", "19" }; assertU(adoc(doc1)); String[] doc2 = { "id", "21", "place_t", "london la dublin", "company_t", "microsoft fujitsu null polecat", "price_ti", "29" }; assertU(adoc(doc2)); String[] doc3 = { "id", "22", "place_t", "krakow london cardiff", "company_t", "polecat null bbc", "price_ti", "39" }; assertU(adoc(doc3)); String[] doc4 = {"id", "23", "place_t", "london", "company_t", "", "price_ti", "29"}; assertU(adoc(doc4)); String[] doc5 = {"id", "24", "place_t", "la", "company_t", ""}; assertU(adoc(doc5)); String[] doc6 = { "id", "25", "company_t", "microsoft polecat null fujitsu null bbc", "price_ti", "59" }; assertU(adoc(doc6)); String[] doc7 = {"id", "26", "place_t", "krakow", "company_t", "null"}; assertU(adoc(doc7)); String[] doc8 = { "id", "27", "place_t", "krakow cardiff dublin london la", "company_t", "null microsoft polecat bbc fujitsu" }; assertU(adoc(doc8)); String[] doc9 = {"id", "28", "place_t", "cork", "company_t", "fujitsu rte"}; assertU(adoc(doc9)); 
assertU(commit()); } }
apache/servicecomb-saga-actuator
35,501
saga-core-akka/src/test/java/org/apache/servicecomb/saga/core/actors/ActorBasedSagaIntegrationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.saga.core.actors; import static org.apache.servicecomb.saga.core.Transaction.SAGA_END_TRANSACTION; import static org.apache.servicecomb.saga.core.Transaction.SAGA_START_TRANSACTION; import static java.util.Arrays.asList; import static java.util.Collections.emptySet; import static java.util.Collections.singletonList; import static java.util.concurrent.TimeUnit.SECONDS; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import org.apache.servicecomb.saga.core.BackwardRecovery; import 
org.apache.servicecomb.saga.core.Compensation; import org.apache.servicecomb.saga.core.EventEnvelope; import org.apache.servicecomb.saga.core.EventStore; import org.apache.servicecomb.saga.core.Fallback; import org.apache.servicecomb.saga.core.ForwardRecovery; import org.apache.servicecomb.saga.core.IdGenerator; import org.apache.servicecomb.saga.core.LongIdGenerator; import org.apache.servicecomb.saga.core.NoOpSagaRequest; import org.apache.servicecomb.saga.core.Operation; import org.apache.servicecomb.saga.core.PersistentStore; import org.apache.servicecomb.saga.core.Saga; import org.apache.servicecomb.saga.core.SagaDefinition; import org.apache.servicecomb.saga.core.SagaEndedEvent; import org.apache.servicecomb.saga.core.SagaEvent; import org.apache.servicecomb.saga.core.SagaEventMatcher; import org.apache.servicecomb.saga.core.SagaRequest; import org.apache.servicecomb.saga.core.SagaRequestImpl; import org.apache.servicecomb.saga.core.SagaResponse; import org.apache.servicecomb.saga.core.SagaStartedEvent; import org.apache.servicecomb.saga.core.SuccessfulSagaResponse; import org.apache.servicecomb.saga.core.Transaction; import org.apache.servicecomb.saga.core.TransactionAbortedEvent; import org.apache.servicecomb.saga.core.TransactionCompensatedEvent; import org.apache.servicecomb.saga.core.TransactionEndedEvent; import org.apache.servicecomb.saga.core.TransactionStartedEvent; import org.apache.servicecomb.saga.core.application.SagaFactory; import org.hamcrest.CoreMatchers; import org.hamcrest.collection.IsIterableContainingInOrder; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.mockito.stubbing.Answer; import com.seanyinx.github.unit.scaffolding.Randomness; import org.apache.servicecomb.saga.core.application.interpreter.FromJsonFormat; import org.apache.servicecomb.saga.infrastructure.EmbeddedEventStore; @SuppressWarnings("unchecked") public class ActorBasedSagaIntegrationTest { private static final 
String sagaId = Randomness.uniquify("sagaId"); private final FromJsonFormat<Set<String>> childrenExtractor = mock(FromJsonFormat.class); private final IdGenerator<Long> idGenerator = new LongIdGenerator(); private final EventStore eventStore = new EmbeddedEventStore(); private final PersistentStore persistentStore = mock(PersistentStore.class); private final SagaDefinition sagaDefinition = mock(SagaDefinition.class); private final Transaction transaction1 = mock(Transaction.class, "transaction1"); private final Transaction transaction2 = mock(Transaction.class, "transaction2"); private final Transaction transaction3 = mock(Transaction.class, "transaction3"); private final Transaction transaction4 = mock(Transaction.class, "transaction4"); private final Compensation compensation1 = mock(Compensation.class, "compensation1"); private final Compensation compensation2 = mock(Compensation.class, "compensation2"); private final Compensation compensation3 = mock(Compensation.class, "compensation3"); private final Compensation compensation4 = mock(Compensation.class, "compensation4"); private final Fallback fallback1 = mock(Fallback.class, "fallback1"); private final String requestJson = "{}"; private final SagaRequest request1 = request("request1", "service1", transaction1, compensation1, fallback1); private final SagaRequest request2 = request("request2", "service2", transaction2, compensation2, request1.id()); private final SagaRequest request3 = request("request3", "service3", transaction3, compensation3, request1.id()); private final SagaRequest request4 = request("request4", "service4", transaction4, compensation4, request3.id()); private final SagaResponse transactionResponse1 = new SuccessfulSagaResponse("transaction1"); private final SagaResponse transactionResponse2 = new SuccessfulSagaResponse("transaction2"); private final SagaResponse transactionResponse3 = new SuccessfulSagaResponse("transaction3"); private final SagaResponse compensationResponse1 = new 
SuccessfulSagaResponse("compensation1"); private final SagaResponse compensationResponse2 = new SuccessfulSagaResponse("compensation2"); private final SagaResponse compensationResponse3 = new SuccessfulSagaResponse("compensation3"); @SuppressWarnings("ThrowableInstanceNeverThrown") private final RuntimeException exception = new RuntimeException("oops"); private Saga saga; private final SagaFactory sagaFactory = new ActorBasedSagaFactory(100, persistentStore, childrenExtractor); // root - node1 - node2 - leaf @Before public void setUp() throws Exception { when(sagaDefinition.policy()).thenReturn(new BackwardRecovery()); when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2}); when(childrenExtractor.fromJson(anyString())).thenReturn(emptySet()); when(childrenExtractor.fromJson(SagaResponse.NONE_RESPONSE.body())).thenReturn(setOf("none")); when(transaction1.send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE)).thenReturn(transactionResponse1); when(transaction2.send(request2.serviceName(), transactionResponse1)).thenReturn(transactionResponse2); when(transaction3.send(request3.serviceName(), transactionResponse1)).thenReturn(transactionResponse3); when(compensation1.send(request1.serviceName(), compensationResponse2)).thenReturn(compensationResponse1); when(compensation2.send(request2.serviceName(), compensationResponse3)).thenReturn(compensationResponse2); when(compensation3.send(request3.serviceName(), SagaResponse.EMPTY_RESPONSE)).thenReturn(compensationResponse3); } @After public void tearDown() throws Exception { sagaFactory.terminate(); assertTrue(sagaFactory.isTerminated()); } @Test public void transactionsAreRunSuccessfully() { saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, 
TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, SAGA_END_TRANSACTION, SagaEndedEvent.class) )); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction2).send(request2.serviceName(), transactionResponse1); verify(compensation1, never()).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); } // root - node1 - node2 - leaf // \_ node3 _/ @Test public void compensateCommittedTransactionsOnFailure() { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); // barrier to make sure the two transactions starts at the same time CyclicBarrier barrier = new CyclicBarrier(2); when(transaction2.send(request2.serviceName(), transactionResponse1)) .thenAnswer( withAnswer(() -> { barrier.await(); Thread.sleep(100); throw exception; })); when(transaction3.send(request3.serviceName(), transactionResponse1)) .thenAnswer( withAnswer(() -> { barrier.await(); return transactionResponse3; })); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), CoreMatchers.anyOf(SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher .eventWith(sagaId, transaction3, TransactionStartedEvent.class)), CoreMatchers.anyOf(SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher .eventWith(sagaId, transaction3, 
TransactionStartedEvent.class)), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionAbortedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation3, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation1, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, Compensation.SAGA_START_COMPENSATION, SagaEndedEvent.class))); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction2).send(request2.serviceName(), transactionResponse1); verify(transaction3).send(request3.serviceName(), transactionResponse1); verify(compensation1).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); verify(compensation3).send(request3.serviceName()); } @Test public void skipIgnoredTransaction() throws Exception { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); when(childrenExtractor.fromJson(transactionResponse1.body())).thenReturn(setOf(request3.id())); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, SAGA_END_TRANSACTION, SagaEndedEvent.class) )); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction3).send(request3.serviceName(), transactionResponse1); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1, 
never()).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); verify(compensation3, never()).send(request3.serviceName()); } @Test public void skipAllIgnoredTransactions() throws Exception { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3, request4}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); when(childrenExtractor.fromJson(transactionResponse1.body())).thenReturn(setOf("none")); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, SAGA_END_TRANSACTION, SagaEndedEvent.class) )); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(transaction3, never()).send(anyString(), any(SagaResponse.class)); verify(transaction4, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1, never()).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); verify(compensation3, never()).send(request3.serviceName()); verify(compensation4, never()).send(request4.serviceName()); } @Test public void doNotCompensateIgnoredTransactions() throws Exception { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3, request4}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); when(childrenExtractor.fromJson(transactionResponse1.body())).thenReturn(setOf(request3.id())); when(transaction4.send(request4.serviceName(), transactionResponse3)).thenThrow(exception); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( 
SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction4, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction4, TransactionAbortedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, Compensation.SAGA_START_COMPENSATION, SagaEndedEvent.class) )); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction3).send(request3.serviceName(), transactionResponse1); verify(transaction4).send(request4.serviceName(), transactionResponse3); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1).send(request1.serviceName()); verify(compensation3).send(request3.serviceName()); verify(compensation2, never()).send(request2.serviceName()); verify(compensation4, never()).send(request4.serviceName()); } // TODO: 2017/10/31 actor will hang and its parent and children will be blocked without its response, timeout must be applied @Ignore // root - node1 - node2 - leaf // \_ node3 _/ @Test public void redoHangingTransactionsOnFailure() throws InterruptedException { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); // barrier to make sure the two transactions starts at the same time CyclicBarrier barrier = new CyclicBarrier(2); when(transaction3.send(request3.serviceName(), transactionResponse1)) 
.thenAnswer(withAnswer(() -> { barrier.await(); throw exception; })); CountDownLatch latch = new CountDownLatch(1); when(transaction2.send(request2.serviceName(), transactionResponse1)) .thenAnswer(withAnswer(() -> { barrier.await(); latch.await(1, SECONDS); return transactionResponse2; })).thenReturn(transactionResponse2); saga.run(); // the ordering of events may not be consistence due to concurrent processing of requests assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), CoreMatchers.anyOf( SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class)), CoreMatchers.anyOf( SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class)), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionAbortedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation2, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation1, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, Compensation.SAGA_START_COMPENSATION, SagaEndedEvent.class))); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction2, times(2)).send(request2.serviceName(), transactionResponse1); verify(transaction3).send(request3.serviceName(), transactionResponse1); verify(compensation1).send(request1.serviceName()); verify(compensation2).send(request2.serviceName()); verify(compensation3, 
never()).send(request3.serviceName()); latch.countDown(); } @Test public void retriesFailedTransactionTillSuccess() { when(sagaDefinition.policy()).thenReturn(new ForwardRecovery()); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); when(transaction2.send(request2.serviceName(), transactionResponse1)) .thenThrow(exception).thenThrow(exception).thenReturn(transactionResponse2); when(transaction2.retries()).thenReturn(-1); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, SAGA_END_TRANSACTION, SagaEndedEvent.class) )); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction2, times(3)).send(request2.serviceName(), transactionResponse1); verify(compensation1, never()).send(anyString(), any(SagaResponse.class)); verify(compensation2, never()).send(anyString(), any(SagaResponse.class)); } @Test public void fallbackWhenCompensationFailed() { int retries = 3; saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); when(transaction2.send(request2.serviceName(), transactionResponse1)).thenThrow(exception); when(compensation1.send(request1.serviceName())).thenThrow(exception); when(compensation1.retries()).thenReturn(retries); saga.run(); verify(transaction1).send(request1.serviceName(), SagaResponse.EMPTY_RESPONSE); verify(transaction2).send(request2.serviceName(), transactionResponse1); verify(compensation1, times(retries + 1)).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); 
verify(fallback1).send(request1.serviceName()); } @Test public void restoresSagaToTransactionStateByPlayingAllEvents() { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); Iterable<EventEnvelope> events = asList( envelope(new SagaStartedEvent(sagaId, requestJson, NoOpSagaRequest.SAGA_START_REQUEST)), envelope(new TransactionStartedEvent(sagaId, request1)), envelope(new TransactionEndedEvent(sagaId, request1, transactionResponse1)), envelope(new TransactionStartedEvent(sagaId, request2)), envelope(new TransactionEndedEvent(sagaId, request2, transactionResponse2)) ); eventStore.populate(events); saga.play(); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, SAGA_END_TRANSACTION, SagaEndedEvent.class) )); verify(transaction1, never()).send(anyString(), any(SagaResponse.class)); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(transaction3).send(request3.serviceName(), transactionResponse1); verify(compensation1, never()).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); verify(compensation3, never()).send(request3.serviceName()); } @Test public void restoresPartialTransactionByPlayingAllEvents() { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, 
request2, request3}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); Iterable<EventEnvelope> events = asList( envelope(new SagaStartedEvent(sagaId, requestJson, NoOpSagaRequest.SAGA_START_REQUEST)), envelope(new TransactionStartedEvent(sagaId, request1)), envelope(new TransactionEndedEvent(sagaId, request1, transactionResponse1)), envelope(new TransactionStartedEvent(sagaId, request2)), envelope(new TransactionEndedEvent(sagaId, request2, transactionResponse2)), envelope(new TransactionStartedEvent(sagaId, request3)) ); eventStore.populate(events); saga.play(); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, SAGA_END_TRANSACTION, SagaEndedEvent.class) )); verify(transaction1, never()).send(anyString(), any(SagaResponse.class)); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(transaction3).send(request3.serviceName(), transactionResponse1); verify(compensation1, never()).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); verify(compensation3, never()).send(request3.serviceName()); } @Test public void restoresToCompensationFromAbortedTransactionByPlayingAllEvents() { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3}); saga = 
sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); Iterable<EventEnvelope> events = asList( envelope(new SagaStartedEvent(sagaId, requestJson, NoOpSagaRequest.SAGA_START_REQUEST)), envelope(new TransactionStartedEvent(sagaId, request1)), envelope(new TransactionEndedEvent(sagaId, request1)), envelope(new TransactionStartedEvent(sagaId, request2)), envelope(new TransactionEndedEvent(sagaId, request2)), envelope(new TransactionStartedEvent(sagaId, request3)), envelope(new TransactionAbortedEvent(sagaId, request3, exception)) ); eventStore.populate(events); saga.play(); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionAbortedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation2, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation1, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, Compensation.SAGA_START_COMPENSATION, SagaEndedEvent.class) )); verify(transaction1, never()).send(anyString(), any(SagaResponse.class)); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(transaction3, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1).send(request1.serviceName()); verify(compensation2).send(request2.serviceName()); verify(compensation3, never()).send(request3.serviceName()); } @Test public void restoresSagaToCompensationStateByPlayingAllEvents() { 
when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); Iterable<EventEnvelope> events = asList( envelope(new SagaStartedEvent(sagaId, requestJson, NoOpSagaRequest.SAGA_START_REQUEST)), envelope(new TransactionStartedEvent(sagaId, request1)), envelope(new TransactionEndedEvent(sagaId, request1)), envelope(new TransactionStartedEvent(sagaId, request2)), envelope(new TransactionEndedEvent(sagaId, request2)), envelope(new TransactionStartedEvent(sagaId, request3)), envelope(new TransactionAbortedEvent(sagaId, request3, exception)), envelope(new TransactionCompensatedEvent(sagaId, request2)) ); eventStore.populate(events); saga.play(); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionAbortedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation2, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation1, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, Compensation.SAGA_START_COMPENSATION, SagaEndedEvent.class) )); verify(transaction1, never()).send(anyString(), any(SagaResponse.class)); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(transaction3, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1).send(request1.serviceName()); verify(compensation2, 
never()).send(request2.serviceName()); verify(compensation3, never()).send(request3.serviceName()); } @Test public void restoresPartialCompensationByPlayingAllEvents() { when(sagaDefinition.requests()).thenReturn(new SagaRequest[]{request1, request2, request3}); saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); Iterable<EventEnvelope> events = asList( envelope(new SagaStartedEvent(sagaId, requestJson, NoOpSagaRequest.SAGA_START_REQUEST)), envelope(new TransactionStartedEvent(sagaId, request1)), envelope(new TransactionEndedEvent(sagaId, request1)), envelope(new TransactionStartedEvent(sagaId, request2)), envelope(new TransactionEndedEvent(sagaId, request2)), envelope(new TransactionStartedEvent(sagaId, request3)), envelope(new TransactionAbortedEvent(sagaId, request3, exception)), envelope(new TransactionCompensatedEvent(sagaId, request2)) ); eventStore.populate(events); saga.play(); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction3, TransactionAbortedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation2, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, compensation1, TransactionCompensatedEvent.class), SagaEventMatcher.eventWith(sagaId, Compensation.SAGA_START_COMPENSATION, SagaEndedEvent.class) )); verify(transaction1, never()).send(anyString(), any(SagaResponse.class)); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); 
verify(transaction3, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); verify(compensation3, never()).send(request3.serviceName()); } @Test public void restoresSagaToEndStateByPlayingAllEvents() { saga = sagaFactory.createSaga(requestJson, sagaId, eventStore, sagaDefinition); Iterable<EventEnvelope> events = asList( envelope(new SagaStartedEvent(sagaId, requestJson, NoOpSagaRequest.SAGA_START_REQUEST)), envelope(new TransactionStartedEvent(sagaId, request1)), envelope(new TransactionEndedEvent(sagaId, request1)), envelope(new TransactionStartedEvent(sagaId, request2)), envelope(new TransactionEndedEvent(sagaId, request2)) ); eventStore.populate(events); saga.play(); saga.run(); assertThat(eventStore, IsIterableContainingInOrder.contains( SagaEventMatcher.eventWith(sagaId, SAGA_START_TRANSACTION, SagaStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction1, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionStartedEvent.class), SagaEventMatcher.eventWith(sagaId, transaction2, TransactionEndedEvent.class), SagaEventMatcher.eventWith(sagaId, SAGA_END_TRANSACTION, SagaEndedEvent.class) )); verify(transaction1, never()).send(anyString(), any(SagaResponse.class)); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1, never()).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); } @Test public void failFastIfSagaLogIsDown() throws Exception { EventStore sagaLog = mock(EventStore.class); saga = sagaFactory.createSaga(requestJson, sagaId, sagaLog, sagaDefinition); doThrow(RuntimeException.class).when(sagaLog).offer(any(SagaStartedEvent.class)); saga.run(); verify(sagaLog).offer(any(SagaStartedEvent.class)); verify(transaction1, 
never()).send(anyString(), any(SagaResponse.class)); verify(transaction2, never()).send(anyString(), any(SagaResponse.class)); verify(compensation1, never()).send(request1.serviceName()); verify(compensation2, never()).send(request2.serviceName()); } private Answer<SagaResponse> withAnswer(Callable<SagaResponse> callable) { return invocationOnMock -> callable.call(); } private EventEnvelope envelope(SagaEvent event) { return new EventEnvelope(idGenerator.nextId(), event); } private SagaRequest request(String requestId, String serviceName, Transaction transaction, Compensation compensation, String... parentIds) { return new SagaRequestImpl(requestId, serviceName, Operation.TYPE_REST, transaction, compensation, parentIds); } private SagaRequest request(String requestId, String serviceName, Transaction transaction, Compensation compensation, Fallback fallback) { return new SagaRequestImpl(requestId, serviceName, Operation.TYPE_REST, transaction, compensation, fallback); } private HashSet<String> setOf(String requestId) { return new HashSet<>(singletonList(requestId)); } }
googleapis/google-cloud-java
33,769
java-recaptchaenterprise/proto-google-cloud-recaptchaenterprise-v1beta1/src/main/java/com/google/recaptchaenterprise/v1beta1/RecaptchaEnterpriseProto.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.proto
// Protobuf Java Version: 3.25.8
package com.google.recaptchaenterprise.v1beta1;

/**
 * Outer "descriptor holder" class generated by protoc for
 * {@code google/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.proto}.
 *
 * <p>It owns the parsed {@link com.google.protobuf.Descriptors.FileDescriptor} for the proto file
 * plus one descriptor / field-accessor-table pair per message type. The sibling generated message
 * classes in this package read these package-private fields via reflection-based accessors.
 *
 * <p>NOTE(review): this file is generated — never edit by hand; regenerate from the .proto source.
 */
public final class RecaptchaEnterpriseProto {
  // Not instantiable: pure static holder.
  private RecaptchaEnterpriseProto() {}

  // No-op: this proto3 file declares no extensions, so there is nothing to register.
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}

  // Delegates to the lite overload above (standard generated pattern).
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }

  // One (descriptor, fieldAccessorTable) pair per message type declared in the proto file,
  // including nested types. All are assigned exactly once in the static initializer below.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_CreateAssessmentRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_CreateAssessmentRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionEvent_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionEvent_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentResponse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentResponse_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_PasswordLeakVerification_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_PasswordLeakVerification_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_Assessment_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_Assessment_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_Event_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_Event_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_fieldAccessorTable;
  // Nested types of TransactionData.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Address_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Address_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_User_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_User_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Item_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Item_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_GatewayInfo_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_GatewayInfo_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TokenProperties_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_TokenProperties_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_fieldAccessorTable;
  // Nested verdict types of FraudPreventionAssessment.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_StolenInstrumentVerdict_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_StolenInstrumentVerdict_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_CardTestingVerdict_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_CardTestingVerdict_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_BehavioralTrustVerdict_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_BehavioralTrustVerdict_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_fieldAccessorTable;

  /** Returns the file descriptor for the whole proto file (built in the static initializer). */
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }

  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;

  static {
    // Serialized FileDescriptorProto for the .proto file, emitted by protoc as string constants.
    // The escape sequences are binary wire-format bytes — do not modify by hand.
    java.lang.String[] descriptorData = {
      "\n"
          + "Bgoogle/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.proto\022(google.clo"
          + "ud.recaptchaenterprise.v1beta1\032\034google/a"
          + "pi/annotations.proto\032\027google/api/client."
          + "proto\032\037google/api/field_behavior.proto\032\031"
          + "google/api/resource.proto\032\037google/protobuf/timestamp.proto\"\255\001\n"
          + "\027CreateAssessmentRequest\022C\n"
          + "\006parent\030\001 \001(\tB3\340A\002\372A-\n"
          + "+cloudresourcemanager.googleapis.com/Project\022M\n\n"
          + "assessment\030\002"
          + " \001(\01324.google.cloud.recaptchaenterprise.v1beta1.AssessmentB\003\340A\002\"\227\005\n"
          + "\020TransactionEvent\022h\n\n"
          + "event_type\030\001 \001(\0162O.google.cloud.recaptchaenterprise.v1beta1.T"
          + "ransactionEvent.TransactionEventTypeB\003\340A\001\022\023\n"
          + "\006reason\030\002 \001(\tB\003\340A\001\022\022\n"
          + "\005value\030\003 \001(\001B\003\340A\001\0223\n\n"
          + "event_time\030\004 \001(\0132\032.google.protobuf.TimestampB\003\340A\001\"\272\003\n"
          + "\024TransactionEventType\022&\n"
          + "\"TRANSACTION_EVENT_TYPE_UNSPECIFIED\020\000\022\024\n"
          + "\020MERCHANT_APPROVE\020\001\022\021\n\r"
          + "MERCHANT_DENY\020\002\022\021\n\r"
          + "MANUAL_REVIEW\020\003\022\021\n\r"
          + "AUTHORIZATION\020\004\022\031\n"
          + "\025AUTHORIZATION_DECLINE\020\005\022\023\n"
          + "\017PAYMENT_CAPTURE\020\006\022\033\n"
          + "\027PAYMENT_CAPTURE_DECLINE\020\007\022\n\n"
          + "\006CANCEL\020\010\022\026\n"
          + "\022CHARGEBACK_INQUIRY\020\t\022\024\n"
          + "\020CHARGEBACK_ALERT\020\n"
          + "\022\026\n"
          + "\022FRAUD_NOTIFICATION\020\013\022\016\n\n"
          + "CHARGEBACK\020\014\022\034\n"
          + "\030CHARGEBACK_REPRESENTMENT\020\r"
          + "\022\026\n"
          + "\022CHARGEBACK_REVERSE\020\016\022\022\n"
          + "\016REFUND_REQUEST\020\017\022\022\n"
          + "\016REFUND_DECLINE\020\020\022\n\n"
          + "\006REFUND\020\021\022\022\n"
          + "\016REFUND_REVERSE\020\022\"\371\006\n"
          + "\031AnnotateAssessmentRequest\022C\n"
          + "\004name\030\001 \001(\tB5\340A\002\372A/\n"
          + "-recaptchaenterprise.googleapis.com/Assessment\022g\n\n"
          + "annotation\030\002 \001(\0162N.google.cloud.recap"
          + "tchaenterprise.v1beta1.AnnotateAssessmentRequest.AnnotationB\003\340A\001\022`\n"
          + "\007reasons\030\003 \003(\0162J.google.cloud.recaptchaenterprise.v1b"
          + "eta1.AnnotateAssessmentRequest.ReasonB\003\340A\001\022\036\n"
          + "\021hashed_account_id\030\004 \001(\014B\003\340A\001\022Z\n"
          + "\021transaction_event\030\005 \001(\0132:.google.cloud.rec"
          + "aptchaenterprise.v1beta1.TransactionEventB\003\340A\001\"~\n\n"
          + "Annotation\022\032\n"
          + "\026ANNOTATION_UNSPECIFIED\020\000\022\016\n\n"
          + "LEGITIMATE\020\001\022\016\n\n"
          + "FRAUDULENT\020\002\022\030\n"
          + "\020PASSWORD_CORRECT\020\003\032\002\010\001\022\032\n"
          + "\022PASSWORD_INCORRECT\020\004\032\002\010\001\"\317\002\n"
          + "\006Reason\022\026\n"
          + "\022REASON_UNSPECIFIED\020\000\022\016\n\n"
          + "CHARGEBACK\020\001\022\024\n"
          + "\020CHARGEBACK_FRAUD\020\010\022\026\n"
          + "\022CHARGEBACK_DISPUTE\020\t\022\n\n"
          + "\006REFUND\020\n"
          + "\022\020\n"
          + "\014REFUND_FRAUD\020\013\022\030\n"
          + "\024TRANSACTION_ACCEPTED\020\014\022\030\n"
          + "\024TRANSACTION_DECLINED\020\r"
          + "\022\026\n"
          + "\022PAYMENT_HEURISTICS\020\002\022\030\n"
          + "\024INITIATED_TWO_FACTOR\020\007\022\025\n"
          + "\021PASSED_TWO_FACTOR\020\003\022\025\n"
          + "\021FAILED_TWO_FACTOR\020\004\022\024\n"
          + "\020CORRECT_PASSWORD\020\005\022\026\n"
          + "\022INCORRECT_PASSWORD\020\006\022\017\n"
          + "\013SOCIAL_SPAM\020\016\"\034\n"
          + "\032AnnotateAssessmentResponse\"\206\001\n"
          + "\030PasswordLeakVerification\022$\n"
          + "\027hashed_user_credentials\030\001 \001(\014B\003\340A\001\022\037\n"
          + "\022credentials_leaked\030\002 \001(\010B\003\340A\003\022#\n"
          + "\026canonicalized_username\030\003 \001(\tB\003\340A\001\"\267\007\n\n"
          + "Assessment\022\021\n"
          + "\004name\030\001 \001(\tB\003\340A\003\022>\n"
          + "\005event\030\002 \001(\0132/.google.cloud.recaptchaenterprise.v1beta1.Event\022\022\n"
          + "\005score\030\003 \001(\002B\003\340A\003\022X\n"
          + "\020token_properties\030\004 \001(\01329.google.cloud.r"
          + "ecaptchaenterprise.v1beta1.TokenPropertiesB\003\340A\003\022_\n"
          + "\007reasons\030\005 \003(\0162I.google.cloud."
          + "recaptchaenterprise.v1beta1.Assessment.ClassificationReasonB\003\340A\003\022f\n"
          + "\032password_leak_verification\030\007 \001(\0132B.google.cloud.reca"
          + "ptchaenterprise.v1beta1.PasswordLeakVerification\022h\n"
          + "\033account_defender_assessment\030\010"
          + " \001(\0132C.google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment\022h\n"
          + "\033fraud_prevention_assessment\030\013 \001(\0132C.google"
          + ".cloud.recaptchaenterprise.v1beta1.FraudPreventionAssessment\"\351\001\n"
          + "\024ClassificationReason\022%\n"
          + "!CLASSIFICATION_REASON_UNSPECIFIED\020\000\022\016\n\n"
          + "AUTOMATION\020\001\022\032\n"
          + "\026UNEXPECTED_ENVIRONMENT\020\002\022\024\n"
          + "\020TOO_MUCH_TRAFFIC\020\003\022\035\n"
          + "\031UNEXPECTED_USAGE_PATTERNS\020\004\022\030\n"
          + "\024LOW_CONFIDENCE_SCORE\020\005\022\025\n"
          + "\021SUSPECTED_CARDING\020\006\022\030\n"
          + "\024SUSPECTED_CHARGEBACK\020\007:_\352A\\\n"
          + "-recaptchaenterpri"
          + "se.googleapis.com/Assessment\022+projects/{project}/assessments/{assessment}\"\261\003\n"
          + "\005Event\022\022\n"
          + "\005token\030\001 \001(\tB\003\340A\001\022\025\n"
          + "\010site_key\030\002 \001(\tB\003\340A\001\022\027\n\n"
          + "user_agent\030\003 \001(\tB\003\340A\001\022\034\n"
          + "\017user_ip_address\030\004 \001(\tB\003\340A\001\022\034\n"
          + "\017expected_action\030\005 \001(\tB\003\340A\001\022\036\n"
          + "\021hashed_account_id\030\006 \001(\014B\003\340A\001\022X\n"
          + "\020transaction_data\030\r"
          + " \001(\01329.google.c"
          + "loud.recaptchaenterprise.v1beta1.TransactionDataB\003\340A\001\022^\n"
          + "\020fraud_prevention\030\021 \001(\0162"
          + "?.google.cloud.recaptchaenterprise.v1beta1.Event.FraudPreventionB\003\340A\001\"N\n"
          + "\017FraudPrevention\022 \n"
          + "\034FRAUD_PREVENTION_UNSPECIFIED\020\000\022\013\n"
          + "\007ENABLED\020\001\022\014\n"
          + "\010DISABLED\020\002\"\235\t\n"
          + "\017TransactionData\022\033\n"
          + "\016transaction_id\030\013 \001(\tH\000\210\001\001\022\026\n"
          + "\016payment_method\030\001 \001(\t\022\020\n"
          + "\010card_bin\030\002 \001(\t\022\026\n"
          + "\016card_last_four\030\003 \001(\t\022\025\n\r"
          + "currency_code\030\004 \001(\t\022\r\n"
          + "\005value\030\005 \001(\001\022\026\n"
          + "\016shipping_value\030\014 \001(\001\022[\n"
          + "\020shipping_address\030\006 \001(\0132A.googl"
          + "e.cloud.recaptchaenterprise.v1beta1.TransactionData.Address\022Z\n"
          + "\017billing_address\030\007"
          + " \001(\0132A.google.cloud.recaptchaenterprise.v1beta1.TransactionData.Address\022L\n"
          + "\004user\030\010"
          + " \001(\0132>.google.cloud.recaptchaenterprise.v1beta1.TransactionData.User\022Q\n"
          + "\tmerchants\030\r"
          + " \003(\0132>.google.cloud.recaptchaenterprise.v1beta1.TransactionData.User\022M\n"
          + "\005items\030\016"
          + " \003(\0132>.google.cloud.recaptchaenterprise.v1beta1.TransactionData.Item\022[\n"
          + "\014gateway_info\030\n"
          + " \001(\0132E.google.cloud.recaptchaen"
          + "terprise.v1beta1.TransactionData.GatewayInfo\032\206\001\n"
          + "\007Address\022\021\n"
          + "\trecipient\030\001 \001(\t\022\017\n"
          + "\007address\030\002 \003(\t\022\020\n"
          + "\010locality\030\003 \001(\t\022\033\n"
          + "\023administrative_area\030\004 \001(\t\022\023\n"
          + "\013region_code\030\005 \001(\t\022\023\n"
          + "\013postal_code\030\006 \001(\t\032\204\001\n"
          + "\004User\022\022\n\n"
          + "account_id\030\006 \001(\t\022\023\n"
          + "\013creation_ms\030\001 \001(\003\022\r\n"
          + "\005email\030\002 \001(\t\022\026\n"
          + "\016email_verified\030\003 \001(\010\022\024\n"
          + "\014phone_number\030\004 \001(\t\022\026\n"
          + "\016phone_verified\030\005 \001(\010\032R\n"
          + "\004Item\022\014\n"
          + "\004name\030\001 \001(\t\022\r\n"
          + "\005value\030\002 \001(\001\022\020\n"
          + "\010quantity\030\003 \001(\003\022\033\n"
          + "\023merchant_account_id\030\004 \001(\t\032p\n"
          + "\013GatewayInfo\022\014\n"
          + "\004name\030\001 \001(\t\022\035\n"
          + "\025gateway_response_code\030\002 \001(\t\022\031\n"
          + "\021avs_response_code\030\003 \001(\t\022\031\n"
          + "\021cvv_response_code\030\004 \001(\tB\021\n"
          + "\017_transaction_id\"\377\002\n"
          + "\017TokenProperties\022\r\n"
          + "\005valid\030\001 \001(\010\022_\n"
          + "\016invalid_reason\030\002 \001(\0162G.googl"
          + "e.cloud.recaptchaenterprise.v1beta1.TokenProperties.InvalidReason\022/\n"
          + "\013create_time\030\003 \001(\0132\032.google.protobuf.Timestamp\022\020\n"
          + "\010hostname\030\004 \001(\t\022\016\n"
          + "\006action\030\005 \001(\t\"\250\001\n\r"
          + "InvalidReason\022\036\n"
          + "\032INVALID_REASON_UNSPECIFIED\020\000\022\032\n"
          + "\026UNKNOWN_INVALID_REASON\020\001\022\r\n"
          + "\tMALFORMED\020\002\022\013\n"
          + "\007EXPIRED\020\003\022\010\n"
          + "\004DUPE\020\004\022\025\n\r"
          + "SITE_MISMATCH\020\005\032\002\010\001\022\013\n"
          + "\007MISSING\020\006\022\021\n\r"
          + "BROWSER_ERROR\020\007\"\304\004\n"
          + "\031FraudPreventionAssessment\022\035\n"
          + "\020transaction_risk\030\001 \001(\002B\003\340A\003\022\203\001\n"
          + "\031stolen_instrument_verdict\030\002 \001(\0132[.google.cloud.recaptch"
          + "aenterprise.v1beta1.FraudPreventionAssessment.StolenInstrumentVerdictB\003\340A\003\022y\n"
          + "\024card_testing_verdict\030\003 \001(\0132V.google.cloud."
          + "recaptchaenterprise.v1beta1.FraudPrevent"
          + "ionAssessment.CardTestingVerdictB\003\340A\003\022\201\001\n"
          + "\030behavioral_trust_verdict\030\004 \001(\0132Z.googl"
          + "e.cloud.recaptchaenterprise.v1beta1.Frau"
          + "dPreventionAssessment.BehavioralTrustVerdictB\003\340A\003\032,\n"
          + "\027StolenInstrumentVerdict\022\021\n"
          + "\004risk\030\001 \001(\002B\003\340A\003\032\'\n"
          + "\022CardTestingVerdict\022\021\n"
          + "\004risk\030\001 \001(\002B\003\340A\003\032,\n"
          + "\026BehavioralTrustVerdict\022\022\n"
          + "\005trust\030\001 \001(\002B\003\340A\003\"\273\002\n"
          + "\031AccountDefenderAssessment\022h\n"
          + "\006labels\030\001 \003(\0162X.google.cloud.recaptchaenterprise.v1beta1.AccountD"
          + "efenderAssessment.AccountDefenderLabel\"\263\001\n"
          + "\024AccountDefenderLabel\022&\n"
          + "\"ACCOUNT_DEFENDER_LABEL_UNSPECIFIED\020\000\022\021\n\r"
          + "PROFILE_MATCH\020\001\022\035\n"
          + "\031SUSPICIOUS_LOGIN_ACTIVITY\020\002\022\037\n"
          + "\033SUSPICIOUS_ACCOUNT_CREATION\020\003\022 \n"
          + "\034RELATED_ACCOUNTS_NUMBER_HIGH\020\0042\315\004\n"
          + "!RecaptchaEnterpriseServiceV1Beta1\022\335\001\n"
          + "\020CreateAssessment\022A.google.cloud.recaptchaenterprise.v1bet"
          + "a1.CreateAssessmentRequest\0324.google.cloud.recaptchaenterprise.v1beta1.Assessment"
          + "\"P\332A\021parent,assessment\202\323\344\223\0026\"(/v1beta1/{parent=projects/*}/assessments:\n"
          + "assessment\022\357\001\n"
          + "\022AnnotateAssessment\022C.google.cloud.recaptchaenterprise.v1beta1.AnnotateAss"
          + "essmentRequest\032D.google.cloud.recaptchaenterprise.v1beta1.AnnotateAssessmentResp"
          + "onse\"N\332A\017name,annotation\202\323\344\223\0026\"1/v1beta1"
          + "/{name=projects/*/assessments/*}:annotat"
          + "e:\001*\032V\312A\"recaptchaenterprise.googleapis."
          + "com\322A.https://www.googleapis.com/auth/cloud-platformB\262\002\n"
          + "&com.google.recaptchaenterprise.v1beta1B\030RecaptchaEnterpriseProt"
          + "oP\001Zacloud.google.com/go/recaptchaenterprise/v2/apiv1beta1/recaptchaenterprisepb"
          + ";recaptchaenterprisepb\242\002\004GCRE\252\002(Google.C"
          + "loud.RecaptchaEnterprise.V1Beta1\312\002(Googl"
          + "e\\Cloud\\RecaptchaEnterprise\\V1beta1\352\002+Go"
          + "ogle::Cloud::RecaptchaEnterprise::V1beta1b\006proto3"
    };
    // Build the file descriptor; dependency order here must match the imports in the .proto file.
    descriptor =
        com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
            descriptorData,
            new com.google.protobuf.Descriptors.FileDescriptor[] {
              com.google.api.AnnotationsProto.getDescriptor(),
              com.google.api.ClientProto.getDescriptor(),
              com.google.api.FieldBehaviorProto.getDescriptor(),
              com.google.api.ResourceProto.getDescriptor(),
              com.google.protobuf.TimestampProto.getDescriptor(),
            });
    // Wire each message's descriptor and field-accessor table. The get(N) indices follow the
    // declaration order of the top-level messages in the .proto file; nested types are fetched
    // from their parent via getNestedTypes().
    internal_static_google_cloud_recaptchaenterprise_v1beta1_CreateAssessmentRequest_descriptor =
        getDescriptor().getMessageTypes().get(0);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_CreateAssessmentRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_CreateAssessmentRequest_descriptor,
            new java.lang.String[] {
              "Parent", "Assessment",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionEvent_descriptor =
        getDescriptor().getMessageTypes().get(1);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionEvent_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionEvent_descriptor,
            new java.lang.String[] {
              "EventType", "Reason", "Value", "EventTime",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentRequest_descriptor =
        getDescriptor().getMessageTypes().get(2);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentRequest_descriptor,
            new java.lang.String[] {
              "Name", "Annotation", "Reasons", "HashedAccountId", "TransactionEvent",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentResponse_descriptor =
        getDescriptor().getMessageTypes().get(3);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_AnnotateAssessmentResponse_descriptor,
            new java.lang.String[] {});
    internal_static_google_cloud_recaptchaenterprise_v1beta1_PasswordLeakVerification_descriptor =
        getDescriptor().getMessageTypes().get(4);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_PasswordLeakVerification_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_PasswordLeakVerification_descriptor,
            new java.lang.String[] {
              "HashedUserCredentials", "CredentialsLeaked", "CanonicalizedUsername",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_Assessment_descriptor =
        getDescriptor().getMessageTypes().get(5);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_Assessment_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_Assessment_descriptor,
            new java.lang.String[] {
              "Name",
              "Event",
              "Score",
              "TokenProperties",
              "Reasons",
              "PasswordLeakVerification",
              "AccountDefenderAssessment",
              "FraudPreventionAssessment",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_Event_descriptor =
        getDescriptor().getMessageTypes().get(6);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_Event_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_Event_descriptor,
            new java.lang.String[] {
              "Token",
              "SiteKey",
              "UserAgent",
              "UserIpAddress",
              "ExpectedAction",
              "HashedAccountId",
              "TransactionData",
              "FraudPrevention",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_descriptor =
        getDescriptor().getMessageTypes().get(7);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_descriptor,
            new java.lang.String[] {
              "TransactionId",
              "PaymentMethod",
              "CardBin",
              "CardLastFour",
              "CurrencyCode",
              "Value",
              "ShippingValue",
              "ShippingAddress",
              "BillingAddress",
              "User",
              "Merchants",
              "Items",
              "GatewayInfo",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Address_descriptor =
        internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_descriptor
            .getNestedTypes()
            .get(0);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Address_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Address_descriptor,
            new java.lang.String[] {
              "Recipient", "Address", "Locality", "AdministrativeArea", "RegionCode", "PostalCode",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_User_descriptor =
        internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_descriptor
            .getNestedTypes()
            .get(1);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_User_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_User_descriptor,
            new java.lang.String[] {
              "AccountId", "CreationMs", "Email", "EmailVerified", "PhoneNumber", "PhoneVerified",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Item_descriptor =
        internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_descriptor
            .getNestedTypes()
            .get(2);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Item_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_Item_descriptor,
            new java.lang.String[] {
              "Name", "Value", "Quantity", "MerchantAccountId",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_GatewayInfo_descriptor =
        internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_descriptor
            .getNestedTypes()
            .get(3);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_GatewayInfo_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_TransactionData_GatewayInfo_descriptor,
            new java.lang.String[] {
              "Name", "GatewayResponseCode", "AvsResponseCode", "CvvResponseCode",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TokenProperties_descriptor =
        getDescriptor().getMessageTypes().get(8);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_TokenProperties_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_TokenProperties_descriptor,
            new java.lang.String[] {
              "Valid", "InvalidReason", "CreateTime", "Hostname", "Action",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_descriptor =
        getDescriptor().getMessageTypes().get(9);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_descriptor,
            new java.lang.String[] {
              "TransactionRisk",
              "StolenInstrumentVerdict",
              "CardTestingVerdict",
              "BehavioralTrustVerdict",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_StolenInstrumentVerdict_descriptor =
        internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_descriptor
            .getNestedTypes()
            .get(0);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_StolenInstrumentVerdict_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_StolenInstrumentVerdict_descriptor,
            new java.lang.String[] {
              "Risk",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_CardTestingVerdict_descriptor =
        internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_descriptor
            .getNestedTypes()
            .get(1);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_CardTestingVerdict_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_CardTestingVerdict_descriptor,
            new java.lang.String[] {
              "Risk",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_BehavioralTrustVerdict_descriptor =
        internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_descriptor
            .getNestedTypes()
            .get(2);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_BehavioralTrustVerdict_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_FraudPreventionAssessment_BehavioralTrustVerdict_descriptor,
            new java.lang.String[] {
              "Trust",
            });
    internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_descriptor =
        getDescriptor().getMessageTypes().get(10);
    internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_descriptor,
            new java.lang.String[] {
              "Labels",
            });
    // Re-parse custom options (HTTP bindings, field behavior, resource references) so they are
    // visible on the descriptor via the extension registry.
    com.google.protobuf.ExtensionRegistry registry =
        com.google.protobuf.ExtensionRegistry.newInstance();
    registry.add(com.google.api.ClientProto.defaultHost);
    registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
    registry.add(com.google.api.AnnotationsProto.http);
    registry.add(com.google.api.ClientProto.methodSignature);
    registry.add(com.google.api.ClientProto.oauthScopes);
    registry.add(com.google.api.ResourceProto.resource);
    registry.add(com.google.api.ResourceProto.resourceReference);
    com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
        descriptor, registry);
    // Force static initialization of dependency descriptor holders.
    com.google.api.AnnotationsProto.getDescriptor();
    com.google.api.ClientProto.getDescriptor();
    com.google.api.FieldBehaviorProto.getDescriptor();
    com.google.api.ResourceProto.getDescriptor();
    com.google.protobuf.TimestampProto.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
googleapis/google-cloud-java
35,203
java-security-private-ca/proto-google-cloud-security-private-ca-v1/src/main/java/com/google/cloud/security/privateca/v1/UndeleteCertificateAuthorityRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/security/privateca/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.security.privateca.v1; /** * * * <pre> * Request message for * [CertificateAuthorityService.UndeleteCertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthorityService.UndeleteCertificateAuthority]. * </pre> * * Protobuf type {@code google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest} */ public final class UndeleteCertificateAuthorityRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest) UndeleteCertificateAuthorityRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UndeleteCertificateAuthorityRequest.newBuilder() to construct. 
private UndeleteCertificateAuthorityRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UndeleteCertificateAuthorityRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UndeleteCertificateAuthorityRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.security.privateca.v1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1_UndeleteCertificateAuthorityRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.security.privateca.v1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1_UndeleteCertificateAuthorityRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest.class, com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest.Builder .class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The resource name for this * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] * in the format `projects/&#42;&#47;locations/&#42;&#47;caPools/&#42;&#47;certificateAuthorities/&#42;`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. 
The resource name for this * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] * in the format `projects/&#42;&#47;locations/&#42;&#47;caPools/&#42;&#47;certificateAuthorities/&#42;`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An ID to identify requests. Specify a unique request ID so that * if you must retry your request, the server will know to ignore the request * if it has already been completed. The server will guarantee that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The requestId. 
*/ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An ID to identify requests. Specify a unique request ID so that * if you must retry your request, the server will know to ignore the request * if it has already been completed. The server will guarantee that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The bytes for requestId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest)) { return super.equals(obj); } com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest other = (com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest) obj; if (!getName().equals(other.getName())) return 
false; if (!getRequestId().equals(other.getRequestId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom(byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [CertificateAuthorityService.UndeleteCertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthorityService.UndeleteCertificateAuthority]. * </pre> * * Protobuf type {@code google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest) com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.security.privateca.v1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1_UndeleteCertificateAuthorityRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.security.privateca.v1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1_UndeleteCertificateAuthorityRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest.class, com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest.Builder .class); } // Construct using // 
com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.security.privateca.v1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1_UndeleteCertificateAuthorityRequest_descriptor; } @java.lang.Override public com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest getDefaultInstanceForType() { return com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest build() { com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest buildPartial() { com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest result = new com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest) { return mergeFrom( (com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest other) { if (other == com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { 
name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The resource name for this * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] * in the format `projects/&#42;&#47;locations/&#42;&#47;caPools/&#42;&#47;certificateAuthorities/&#42;`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name for this * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] * in the format `projects/&#42;&#47;locations/&#42;&#47;caPools/&#42;&#47;certificateAuthorities/&#42;`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. 
The resource name for this * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] * in the format `projects/&#42;&#47;locations/&#42;&#47;caPools/&#42;&#47;certificateAuthorities/&#42;`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name for this * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] * in the format `projects/&#42;&#47;locations/&#42;&#47;caPools/&#42;&#47;certificateAuthorities/&#42;`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name for this * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] * in the format `projects/&#42;&#47;locations/&#42;&#47;caPools/&#42;&#47;certificateAuthorities/&#42;`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An ID to identify requests. 
Specify a unique request ID so that * if you must retry your request, the server will know to ignore the request * if it has already been completed. The server will guarantee that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An ID to identify requests. Specify a unique request ID so that * if you must retry your request, the server will know to ignore the request * if it has already been completed. The server will guarantee that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). 
* </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An ID to identify requests. Specify a unique request ID so that * if you must retry your request, the server will know to ignore the request * if it has already been completed. The server will guarantee that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An ID to identify requests. Specify a unique request ID so that * if you must retry your request, the server will know to ignore the request * if it has already been completed. The server will guarantee that for at * least 60 minutes since the first request. 
* * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An ID to identify requests. Specify a unique request ID so that * if you must retry your request, the server will know to ignore the request * if it has already been completed. The server will guarantee that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @param value The bytes for requestId to set. * @return This builder for chaining. 
*/ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest) } // @@protoc_insertion_point(class_scope:google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest) private static final com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest(); } public static com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UndeleteCertificateAuthorityRequest> PARSER = new com.google.protobuf.AbstractParser<UndeleteCertificateAuthorityRequest>() { @java.lang.Override public UndeleteCertificateAuthorityRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UndeleteCertificateAuthorityRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UndeleteCertificateAuthorityRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.security.privateca.v1.UndeleteCertificateAuthorityRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hive
35,224
ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.cache.results; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Supplier; import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; import 
org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; import org.apache.hadoop.hive.common.metrics.common.MetricsVariable; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.NotificationEvent; import org.apache.hadoop.hive.metastore.messaging.MessageBuilder; import org.apache.hadoop.hive.ql.hooks.Entity.Type; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.metadata.events.EventConsumer; import org.apache.hadoop.hive.ql.parse.ColumnAccessInfo; import org.apache.hadoop.hive.ql.parse.TableAccessInfo; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hive.common.util.TxnIdUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A class to handle management and lookup of cached Hive query results. 
*/
public final class QueryResultsCache {

  private static final Logger LOG = LoggerFactory.getLogger(QueryResultsCache.class);

  /**
   * Key information used to look up a cached query result: the query text plus the
   * table ids and (lazily supplied) transactional write-id state needed to validate
   * a candidate cache entry in entryMatches().
   */
  public static class LookupInfo {
    private final String queryText;
    // Supplier so the txn write-id list is only fetched if a transactional-table
    // check is actually performed during entry matching.
    private final Supplier<ValidTxnWriteIdList> txnWriteIdListProvider;
    // Metastore table ids referenced by the looking-up query; used to reject entries
    // whose tables are no longer the same underlying table objects.
    private final Set<Long> tableIds;

    public LookupInfo(String queryText, Supplier<ValidTxnWriteIdList> txnWriteIdListProvider,
        Set<Long> tableIds) {
      super();
      this.queryText = queryText;
      this.txnWriteIdListProvider = txnWriteIdListProvider;
      this.tableIds = tableIds;
    }

    public String getQueryText() {
      return queryText;
    }
  }

  /**
   * Metadata captured about the original query (operation, schema, access info, inputs),
   * kept with the cache entry for later validation/authorization of cache reuse.
   */
  public static class QueryInfo {
    private long queryTime;
    private LookupInfo lookupInfo;
    private HiveOperation hiveOperation;
    private List<FieldSchema> resultSchema;
    private TableAccessInfo tableAccessInfo;
    private ColumnAccessInfo columnAccessInfo;
    private Set<ReadEntity> inputs;

    public QueryInfo(
        long queryTime,
        LookupInfo lookupInfo,
        HiveOperation hiveOperation,
        List<FieldSchema> resultSchema,
        TableAccessInfo tableAccessInfo,
        ColumnAccessInfo columnAccessInfo,
        Set<ReadEntity> inputs) {
      this.queryTime = queryTime;
      this.lookupInfo = lookupInfo;
      this.hiveOperation = hiveOperation;
      this.resultSchema = resultSchema;
      this.tableAccessInfo = tableAccessInfo;
      this.columnAccessInfo = columnAccessInfo;
      this.inputs = inputs;
    }

    public LookupInfo getLookupInfo() {
      return lookupInfo;
    }

    public void setLookupInfo(LookupInfo lookupInfo) {
      this.lookupInfo = lookupInfo;
    }

    public HiveOperation getHiveOperation() {
      return hiveOperation;
    }

    public void setHiveOperation(HiveOperation hiveOperation) {
      this.hiveOperation = hiveOperation;
    }

    public List<FieldSchema> getResultSchema() {
      return resultSchema;
    }

    public void setResultSchema(List<FieldSchema> resultSchema) {
      this.resultSchema = resultSchema;
    }

    public TableAccessInfo getTableAccessInfo() {
      return tableAccessInfo;
    }

    public void setTableAccessInfo(TableAccessInfo tableAccessInfo) {
      this.tableAccessInfo = tableAccessInfo;
    }

    public ColumnAccessInfo getColumnAccessInfo() {
      return columnAccessInfo;
    }

    public void setColumnAccessInfo(ColumnAccessInfo columnAccessInfo) {
      this.columnAccessInfo = columnAccessInfo;
    }

    public Set<ReadEntity> getInputs() {
      return inputs;
    }

    public void setInputs(Set<ReadEntity> inputs) {
      this.inputs = inputs;
    }

    public long getQueryTime() {
      return queryTime;
    }

    public void setQueryTime(long queryTime) {
      this.queryTime = queryTime;
    }
  }

  /**
   * Lifecycle states of a cache entry. Entries start PENDING (placeholder for a
   * running query), become VALID via setEntryValid(), and end INVALID.
   */
  public enum CacheEntryStatus {
    VALID, INVALID, PENDING
  }

  /**
   * A single cached query result. Tracks the cached result files, entry size, a reader
   * reference count, and the validity state. Result-file cleanup is deferred until the
   * entry is INVALID and no readers remain (see cleanupIfNeeded()).
   */
  public static class CacheEntry {
    private QueryInfo queryInfo;
    private FetchWork fetchWork;
    private Path cachedResultsPath;
    private Set<FileStatus> cachedResultPaths;

    // Cache administration
    private long size;
    // Number of in-flight queries currently reading this entry.
    private AtomicInteger readers = new AtomicInteger(0);
    // Pending scheduled lifetime-expiration task, if maxEntryLifetime is set.
    private ScheduledFuture<?> invalidationFuture = null;
    private volatile CacheEntryStatus status = CacheEntryStatus.PENDING;
    private ValidTxnWriteIdList txnWriteIdList;

    /** Releases a reader reference taken by addReader(); may trigger file cleanup. */
    public void releaseReader() {
      int readerCount = 0;
      synchronized (this) {
        readerCount = readers.decrementAndGet();
      }
      LOG.debug("releaseReader: entry: {}, readerCount: {}", this, readerCount);

      cleanupIfNeeded();
    }

    public String toString() {
      return String.format("CacheEntry#%s query: [ %s ], status: %s, location: %s, size: %d",
          System.identityHashCode(this), getQueryInfo().getLookupInfo().getQueryText(),
          status, cachedResultsPath, size);
    }

    /**
     * Attempts to take a reader reference on this entry.
     * @return true if the entry was VALID and the reference was taken, false otherwise.
     */
    public boolean addReader() {
      boolean added = false;
      int readerCount = 0;
      synchronized (this) {
        // Only a VALID entry may gain readers; PENDING/INVALID entries are not readable.
        if (status == CacheEntryStatus.VALID) {
          readerCount = readers.incrementAndGet();
          added = true;
        }
      }
      LOG.debug("addReader: entry: {}, readerCount: {}, added: {}", this, readerCount, added);
      return added;
    }

    private int numReaders() {
      return readers.get();
    }

    /**
     * Marks the entry INVALID. Depending on the previous state this cancels the
     * scheduled lifetime expiration and/or wakes queries blocked in waitForValidStatus().
     */
    private void invalidate() {
      LOG.info("Invalidating cache entry: {}", this);
      CacheEntryStatus prevStatus = setStatus(CacheEntryStatus.INVALID);
      if (prevStatus == CacheEntryStatus.VALID) {
        if (invalidationFuture != null) {
          // The cache entry has just been invalidated, no need for the scheduled invalidation.
          invalidationFuture.cancel(false);
        }
        cleanupIfNeeded();
        decrementMetric(MetricsConstant.QC_VALID_ENTRIES);
      } else if (prevStatus == CacheEntryStatus.PENDING) {
        // Need to notify any queries waiting on the change from pending status.
        synchronized (this) {
          this.notifyAll();
        }
        decrementMetric(MetricsConstant.QC_PENDING_FAILS);
      }
    }

    public CacheEntryStatus getStatus() {
      return status;
    }

    /** Swaps the status under the entry lock and returns the previous status. */
    private CacheEntryStatus setStatus(CacheEntryStatus newStatus) {
      synchronized (this) {
        CacheEntryStatus oldStatus = status;
        status = newStatus;
        return oldStatus;
      }
    }

    /** Triggers result-file cleanup once the entry is INVALID and has no readers left. */
    private void cleanupIfNeeded() {
      if (status == CacheEntryStatus.INVALID && readers.get() <= 0) {
        QueryResultsCache.cleanupEntry(this);
      }
    }

    private String getQueryText() {
      return getQueryInfo().getLookupInfo().getQueryText();
    }

    public FetchWork getFetchWork() {
      // FetchWork's sink is used to hold results, so each query needs a separate copy of FetchWork
      FetchWork fetch = new FetchWork(fetchWork.getTblDir(), fetchWork.getTblDesc(),
          fetchWork.getLimit());
      fetch.setCachedResult(true);
      fetch.setFilesToFetch(this.cachedResultPaths);
      return fetch;
    }

    public QueryInfo getQueryInfo() {
      return queryInfo;
    }

    public Path getCachedResultsPath() {
      return cachedResultsPath;
    }

    /**
     * Wait for the cache entry to go from PENDING to VALID status.
     * @return true if the cache entry successfully changed to VALID status,
     *         false if the status changes from PENDING to INVALID
     */
    public boolean waitForValidStatus() {
      LOG.info("Waiting on pending cacheEntry: {}", this);
      long timeout = 1000;
      long startTime = System.nanoTime();
      long endTime;
      while (true) {
        try {
          switch (status) {
          case VALID:
            endTime = System.nanoTime();
            incrementMetric(MetricsConstant.QC_PENDING_SUCCESS_WAIT_TIME,
                TimeUnit.MILLISECONDS.convert(endTime - startTime, TimeUnit.NANOSECONDS));
            return true;
          case INVALID:
            endTime = System.nanoTime();
            incrementMetric(MetricsConstant.QC_PENDING_FAILS_WAIT_TIME,
                TimeUnit.MILLISECONDS.convert(endTime - startTime, TimeUnit.NANOSECONDS));
            return false;
          case PENDING:
            // Status has not changed, continue waiting.
            break;
          }
          // Bounded wait; loop re-checks status after each wakeup or timeout.
          synchronized (this) {
            this.wait(timeout);
          }
        } catch (InterruptedException err) {
          Thread.currentThread().interrupt();
          return false;
        }
      }
    }

    /** Fully-qualified names of the tables read by the cached query. */
    public Stream<String> getTableNames() {
      return queryInfo.getInputs().stream()
          .filter(readEntity -> readEntity.getType() == Type.TABLE)
          .map(readEntity -> readEntity.getTable().getFullyQualifiedName());
    }
  }

  // Allow lookup by query string
  private final Map<String, Set<CacheEntry>> queryMap = new HashMap<String, Set<CacheEntry>>();

  // LRU. Could also implement LRU as a doubly linked list if CacheEntry keeps its node.
  // Use synchronized map since even read actions cause the lru to get updated.
  private final Map<CacheEntry, CacheEntry> lru = Collections.synchronizedMap(
      new LinkedHashMap<CacheEntry, CacheEntry>(INITIAL_LRU_SIZE, LRU_LOAD_FACTOR, true));

  // Lookup of cache entries by table used in the query, for cache invalidation.
  private final Map<String, Set<CacheEntry>> tableToEntryMap = new HashMap<>();

  private final HiveConf conf;
  private Path cacheDirPath;
  // Placeholder path (never actually created) used to mark 0-row results.
  private Path zeroRowsPath;
  // Total bytes of all cached results; guarded by rwLock.
  private long cacheSize = 0;
  private long maxCacheSize;
  private long maxEntrySize;
  private long maxEntryLifetime;
  // Guards the cache structures (queryMap, lru, tableToEntryMap) and cacheSize.
  private ReadWriteLock rwLock = new ReentrantReadWriteLock();

  private ScheduledFuture<?> invalidationPollFuture;

  private QueryResultsCache(HiveConf configuration) throws IOException {
    this.conf = configuration;

    // Set up cache directory
    Path rootCacheDir = new Path(conf.getVar(HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_DIRECTORY));
    LOG.info("Initializing query results cache at {}", rootCacheDir);
    // Unique per-instance subdirectory under the configured root.
    String currentCacheDirName = "results-" + UUID.randomUUID().toString();
    cacheDirPath = new Path(rootCacheDir, currentCacheDirName);
    FileSystem fs = cacheDirPath.getFileSystem(conf);
    // Owner-only permissions on the cache directory.
    FsPermission fsPermission = new FsPermission("700");
    fs.mkdirs(cacheDirPath, fsPermission);

    // Create non-existent path for 0-row results
    zeroRowsPath = new Path(cacheDirPath, "dummy_zero_rows");

    // Results cache directory should be cleaned up at process termination.
    fs.deleteOnExit(cacheDirPath);

    maxCacheSize = conf.getLongVar(HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_MAX_SIZE);
    maxEntrySize = conf.getLongVar(HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_MAX_ENTRY_SIZE);
    maxEntryLifetime = conf.getTimeVar(
        HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_MAX_ENTRY_LIFETIME,
        TimeUnit.MILLISECONDS);

    LOG.info("Query results cache: cacheDirectory {}, maxCacheSize {}, maxEntrySize {}, maxEntryLifetime {}",
        cacheDirPath, maxCacheSize, maxEntrySize, maxEntryLifetime);
  }

  private static final AtomicBoolean inited = new AtomicBoolean(false);
  private static QueryResultsCache instance;

  /**
   * One-time initialization of the singleton cache instance.
   * On failure the inited flag is reset so a later attempt can retry.
   * @param conf Hive configuration supplying cache directory/size/lifetime settings.
   * @throws IOException if the cache directory cannot be created.
   */
  public static void initialize(HiveConf conf) throws IOException {
    if (!inited.getAndSet(true)) {
      try {
        instance = new QueryResultsCache(conf);

        Metrics metrics = MetricsFactory.getInstance();
        if (metrics != null) {
          registerMetrics(metrics, instance);
        }
      } catch (Exception err) {
        inited.set(false);
        throw err;
      }
    }
  }

  public static QueryResultsCache getInstance() {
    return instance;
  }

  public Path getCacheDirPath() {
    return cacheDirPath;
  }

  /**
   * Check if the cache contains an entry for the requested LookupInfo.
   * Prefers a VALID entry but will return a PENDING one if that is all that matches.
   * @param request
   * @return The cached result if there is a match in the cache, or null if no match is found.
   */
  public CacheEntry lookup(LookupInfo request) {
    CacheEntry result = null;

    LOG.debug("QueryResultsCache lookup for query: {}", request.queryText);

    boolean foundPending = false;
    // Cannot remove entries while we currently hold read lock, so keep track of them to delete later.
    Set<CacheEntry> entriesToRemove = new HashSet<CacheEntry>();
    Lock readLock = rwLock.readLock();
    try {
      // Note: ReentrantReadWriteLock does not allow upgrading a read lock to a write lock.
      // Care must be taken while under read lock, to make sure we do not perform any actions
      // which attempt to take a write lock.
      readLock.lock();
      Set<CacheEntry> candidates = queryMap.get(request.queryText);
      if (candidates != null) {
        CacheEntry pendingResult = null;
        for (CacheEntry candidate : candidates) {
          if (entryMatches(request, candidate, entriesToRemove)) {
            CacheEntryStatus entryStatus = candidate.status;
            if (entryStatus == CacheEntryStatus.VALID) {
              result = candidate;
              break;
            } else if (entryStatus == CacheEntryStatus.PENDING && pendingResult == null) {
              pendingResult = candidate;
            }
          }
        }
        // Try to find valid entry, but settle for pending entry if that is all we have.
        if (result == null && pendingResult != null) {
          result = pendingResult;
          foundPending = true;
        }
        if (result != null) {
          lru.get(result);  // Update LRU
        }
      }
    } finally {
      readLock.unlock();
    }

    // Now that we have exited read lock it is safe to remove any invalid entries.
    for (CacheEntry invalidEntry : entriesToRemove) {
      removeEntry(invalidEntry);
    }

    LOG.debug("QueryResultsCache lookup result: {}", result);
    incrementMetric(MetricsConstant.QC_LOOKUPS);
    if (result != null) {
      if (foundPending) {
        incrementMetric(MetricsConstant.QC_PENDING_HITS);
      } else {
        incrementMetric(MetricsConstant.QC_VALID_HITS);
      }
    }

    return result;
  }

  /**
   * Add an entry to the cache.
   * The new entry will be in PENDING state and not usable until setEntryValid() is called on the entry.
   * @param queryInfo
   * @return the placeholder cache entry that was added.
   */
  public CacheEntry addToCache(QueryInfo queryInfo, ValidTxnWriteIdList txnWriteIdList) {
    // Create placeholder entry with PENDING state.
    String queryText = queryInfo.getLookupInfo().getQueryText();
    CacheEntry addedEntry = new CacheEntry();
    addedEntry.queryInfo = queryInfo;
    addedEntry.txnWriteIdList = txnWriteIdList;

    Lock writeLock = rwLock.writeLock();
    try {
      writeLock.lock();
      LOG.info("Adding placeholder cache entry for query '{}'", queryText);

      // Add the entry to the cache structures while under write lock.
      addToEntryMap(queryMap, queryText, addedEntry);
      lru.put(addedEntry, addedEntry);
      // Index of entries by table usage.
      addedEntry.getTableNames()
          .forEach(tableName -> addToEntryMap(tableToEntryMap, tableName, addedEntry));
    } finally {
      writeLock.unlock();
    }

    return addedEntry;
  }

  /**
   * Updates a pending cache entry with a FetchWork result from a finished query.
   * If successful the cache entry will be set to valid status and be usable for cached queries.
   * Important: Adding the entry to the cache will increment the reader count for the cache entry.
   * CacheEntry.releaseReader() should be called when the caller is done with the cache entry.
   * @param cacheEntry
   * @param fetchWork
   * @return true if the entry was made VALID, false if it was rejected or invalidated.
   */
  public boolean setEntryValid(CacheEntry cacheEntry, FetchWork fetchWork) {
    Path queryResultsPath = null;
    Path cachedResultsPath = null;

    try {
      // if we are here file sink op should have created files to fetch from
      assert(fetchWork.getFilesToFetch() != null );
      boolean requiresCaching = true;
      queryResultsPath = fetchWork.getTblDir();
      FileSystem resultsFs = queryResultsPath.getFileSystem(conf);

      // Sum the result file sizes; if any file is missing there is nothing to cache.
      long resultSize = 0;
      for(FileStatus fs:fetchWork.getFilesToFetch()) {
        if(resultsFs.exists(fs.getPath())) {
          resultSize += fs.getLen();
        } else {
          // No actual result directory, no need to cache anything.
          requiresCaching = false;
          break;
        }
      }

      if (!shouldEntryBeAdded(cacheEntry, resultSize)) {
        return false;
      }

      // Synchronize on the cache entry so that no one else can invalidate this entry
      // while we are in the process of setting it to valid.
      synchronized (cacheEntry) {
        if (cacheEntry.getStatus() == CacheEntryStatus.INVALID) {
          // Entry either expired, or was invalidated due to table updates
          return false;
        }

        if (requiresCaching) {
          cacheEntry.cachedResultPaths = new HashSet<>();
          for(FileStatus fs:fetchWork.getFilesToFetch()) {
            cacheEntry.cachedResultPaths.add(fs);
          }
          LOG.info("Cached query result paths located at {} (size {}) for query '{}'",
              queryResultsPath, resultSize, cacheEntry.getQueryText());
        }

        // Create a new FetchWork to reference the new cache location.
        FetchWork fetchWorkForCache = new FetchWork(fetchWork.getTblDir(), fetchWork.getTblDesc(),
            fetchWork.getLimit());
        fetchWorkForCache.setCachedResult(true);
        fetchWorkForCache.setFilesToFetch(fetchWork.getFilesToFetch());
        cacheEntry.fetchWork = fetchWorkForCache;
        //cacheEntry.cachedResultsPath = cachedResultsPath;
        cacheEntry.size = resultSize;
        this.cacheSize += resultSize;

        cacheEntry.setStatus(CacheEntryStatus.VALID);
        // Mark this entry as being in use. Caller will need to release later.
        cacheEntry.addReader();

        scheduleEntryInvalidation(cacheEntry);

        // Notify any queries waiting on this cacheEntry to become valid.
        cacheEntry.notifyAll();
      }

      incrementMetric(MetricsConstant.QC_VALID_ENTRIES);
      incrementMetric(MetricsConstant.QC_TOTAL_ENTRIES_ADDED);
    } catch (Exception err) {
      String queryText = cacheEntry.getQueryText();
      LOG.error("Failed to create cache entry for query results for query: " + queryText, err);
      cacheEntry.size = 0;
      cacheEntry.cachedResultsPath = null;

      // Invalidate the entry. Rely on query cleanup to remove from lookup.
      cacheEntry.invalidate();
      return false;
    }

    return true;
  }

  /** Removes all entries from the cache. */
  public void clear() {
    Lock writeLock = rwLock.writeLock();
    try {
      writeLock.lock();
      LOG.info("Clearing the results cache");
      CacheEntry[] allEntries = null;
      // Snapshot the LRU key set under its own lock to avoid concurrent modification.
      synchronized (lru) {
        allEntries = lru.keySet().toArray(EMPTY_CACHEENTRY_ARRAY);
      }
      for (CacheEntry entry : allEntries) {
        try {
          removeEntry(entry);
        } catch (Exception err) {
          LOG.error("Error removing cache entry " + entry, err);
        }
      }
    } finally {
      writeLock.unlock();
    }
  }

  /** @return total size in bytes of the cached results currently in the cache. */
  public long getSize() {
    Lock readLock = rwLock.readLock();
    try {
      readLock.lock();
      return cacheSize;
    } finally {
      readLock.unlock();
    }
  }

  /**
   * Invalidates and removes cache entries that read the given table, for updates that
   * happened at or after the time the cached query ran.
   */
  public void notifyTableChanged(String dbName, String tableName, long updateTime) {
    LOG.debug("Table changed: {}.{}, at {}", dbName, tableName, updateTime);
    // Invalidate all cache entries using this table.
    List<CacheEntry> entriesToInvalidate = null;
    rwLock.writeLock().lock();
    try {
      String key = (dbName.toLowerCase() + "." + tableName.toLowerCase());
      Set<CacheEntry> entriesForTable = tableToEntryMap.get(key);
      if (entriesForTable != null) {
        // Possible concurrent modification issues if we try to remove cache entries while
        // traversing the cache structures. Save the entries to remove in a separate list.
        entriesToInvalidate = new ArrayList<>(entriesForTable);
      }
      if (entriesToInvalidate != null) {
        for (CacheEntry entry : entriesToInvalidate) {
          // Ignore updates that occurred before this cached query was created.
          if (entry.getQueryInfo().getQueryTime() <= updateTime) {
            removeEntry(entry);
          }
        }
      }
    } finally {
      rwLock.writeLock().unlock();
    }
  }

  private static final int INITIAL_LRU_SIZE = 16;
  private static final float LRU_LOAD_FACTOR = 0.75f;
  private static final CacheEntry[] EMPTY_CACHEENTRY_ARRAY = {};

  /**
   * Check that the cache entry matches the lookupInfo.
   * @param lookupInfo
   * @param entry
   * @param entriesToRemove Set of entries to be removed after exiting read lock section.
   *        If the entry is found to be invalid it will be added to this set.
   * @return true if the entry can serve the lookup, false otherwise.
   */
  private boolean entryMatches(LookupInfo lookupInfo, CacheEntry entry,
      Set<CacheEntry> entriesToRemove) {
    QueryInfo queryInfo = entry.getQueryInfo();
    for (ReadEntity readEntity : queryInfo.getInputs()) {
      // Check that entry is usable, if the entry has table inputs.
      if (readEntity.getType() == Type.TABLE) {
        Table tableUsed = readEntity.getTable();
        // we want that the lookupInfo.tableIds are all covered by the table ids of the cache entry
        // the query is used as cache key, so the lookup and the entry should use the same number of tables
        // so it is enough to check whether every cache table id is contained in the lookup
        long id = tableUsed.getTTable().getId();
        if (!lookupInfo.tableIds.contains(id)) {
          LOG.debug("Cache entry contains a table (tableId={}) that is not present in the query", id);
          return false;
        }
        // Check that the tables used do not resolve to temp tables.
        Map<String, Table> tempTables =
            SessionHiveMetaStoreClient.getTempTablesForDatabase(tableUsed.getDbName(),
                tableUsed.getTableName());
        if (tempTables != null && tempTables.containsKey(tableUsed.getTableName())) {
          LOG.info("{} resolves to a temporary table in the current session. This query cannot use the cache.",
              tableUsed.getTableName());
          return false;
        }

        // Has the table changed since the query was cached?
        // For transactional tables, can compare the table writeIDs of the current/cached query.
        if (AcidUtils.isTransactionalTable(tableUsed)) {
          boolean writeIdCheckPassed = false;
          String tableName = tableUsed.getFullyQualifiedName();
          ValidTxnWriteIdList currentTxnWriteIdList = lookupInfo.txnWriteIdListProvider.get();
          if (currentTxnWriteIdList == null) {
            LOG.warn("Current query's txnWriteIdList is null!");
            return false;
          }
          if (entry.txnWriteIdList == null) {
            LOG.warn("Cache entry's txnWriteIdList is null!");
            return false;
          }
          ValidWriteIdList currentWriteIdForTable =
              currentTxnWriteIdList.getTableValidWriteIdList(tableName);
          ValidWriteIdList cachedWriteIdForTable =
              entry.txnWriteIdList.getTableValidWriteIdList(tableName);
          LOG.debug("Checking writeIds for table {}: currentWriteIdForTable {}, cachedWriteIdForTable {}",
              tableName, currentWriteIdForTable, cachedWriteIdForTable);
          if (currentWriteIdForTable != null && cachedWriteIdForTable != null) {
            if (TxnIdUtils.checkEquivalentWriteIds(currentWriteIdForTable, cachedWriteIdForTable)) {
              writeIdCheckPassed = true;
            }
          }

          if (!writeIdCheckPassed) {
            LOG.debug("Cached query no longer valid due to table {}",
                tableUsed.getFullyQualifiedName());
            // We can invalidate the entry now, but calling removeEntry() requires a write lock
            // and we may already have read lock taken now. Add to entriesToRemove to delete later.
            entriesToRemove.add(entry);
            entry.invalidate();
            return false;
          }
        }
      }
    }

    return true;
  }

  /** Invalidates the entry and removes it from all cache structures. */
  public void removeEntry(CacheEntry entry) {
    entry.invalidate();
    rwLock.writeLock().lock();
    try {
      removeFromLookup(entry);
      lru.remove(entry);
      // Should the cache size be updated here, or after the result data has actually been deleted?
      cacheSize -= entry.size;
    } finally {
      rwLock.writeLock().unlock();
    }
  }

  /** Removes the entry from the query-text and table-usage maps. Caller holds the write lock. */
  private void removeFromLookup(CacheEntry entry) {
    String queryString = entry.getQueryText();
    if (!removeFromEntryMap(queryMap, queryString, entry)) {
      LOG.warn("Attempted to remove entry but it was not in the cache: {}", entry);
    }

    // Remove this entry from the table usage mappings.
    entry.getTableNames()
        .forEach(tableName -> removeFromEntryMap(tableToEntryMap, tableName, entry));
  }

  /** Sets entry.size to the total on-disk length of the query's result directory. */
  private void calculateEntrySize(CacheEntry entry, FetchWork fetchWork) throws IOException {
    Path queryResultsPath = fetchWork.getTblDir();
    FileSystem resultsFs = queryResultsPath.getFileSystem(conf);
    ContentSummary cs = resultsFs.getContentSummary(queryResultsPath);
    entry.size = cs.getLength();
  }

  /**
   * Determines if the cache entry should be added to the results cache.
   */
  private boolean shouldEntryBeAdded(CacheEntry entry, long size) {
    // Assumes the cache lock has already been taken.
    if (maxEntrySize >= 0 && size > maxEntrySize) {
      LOG.debug("Cache entry size {} larger than max entry size ({})", size, maxEntrySize);
      incrementMetric(MetricsConstant.QC_REJECTED_TOO_LARGE);
      return false;
    }

    if (!clearSpaceForCacheEntry(entry, size)) {
      return false;
    }

    return true;
  }

  private boolean hasSpaceForCacheEntry(CacheEntry entry, long size) {
    if (maxCacheSize >= 0) {
      return (cacheSize + size) <= maxCacheSize;
    }
    // Negative max cache size means unbounded.
    return true;
  }

  /** @return the least-recently-used VALID entry, or null if there is none. */
  private CacheEntry findEntryToRemove() {
    // Entries should be in LRU order in the keyset iterator.
    Set<CacheEntry> entries = lru.keySet();
    synchronized (lru) {
      for (CacheEntry removalCandidate : entries) {
        if (removalCandidate.getStatus() != CacheEntryStatus.VALID) {
          continue;
        }
        return removalCandidate;
      }
    }
    return null;
  }

  /** Evicts LRU entries until the new entry of the given size fits, or no candidates remain. */
  private boolean clearSpaceForCacheEntry(CacheEntry entry, long size) {
    if (hasSpaceForCacheEntry(entry, size)) {
      return true;
    }

    LOG.info("Clearing space for cache entry for query: [{}] with size {}",
        entry.getQueryText(), size);

    CacheEntry removalCandidate;
    while ((removalCandidate = findEntryToRemove()) != null) {
      LOG.info("Removing entry: {}", removalCandidate);
      removeEntry(removalCandidate);
      // TODO: Should we wait for the entry to actually be deleted from HDFS? Would have to
      // poll the reader count, waiting for it to reach 0, at which point cleanup should occur.
      if (hasSpaceForCacheEntry(entry, size)) {
        return true;
      }
    }

    LOG.info("Could not free enough space for cache entry for query: [{}] withe size {}",
        entry.getQueryText(), size);
    return false;
  }

  /** Adds entry to the set for key, creating the set on first use. */
  private static void addToEntryMap(Map<String, Set<CacheEntry>> entryMap,
      String key, CacheEntry entry) {
    Set<CacheEntry> entriesForKey = entryMap.get(key);
    if (entriesForKey == null) {
      entriesForKey = new HashSet<CacheEntry>();
      entryMap.put(key, entriesForKey);
    }
    entriesForKey.add(entry);
  }

  /** Removes entry from the set for key; drops the key when its set becomes empty. */
  private static boolean removeFromEntryMap(Map<String, Set<CacheEntry>> entryMap,
      String key, CacheEntry entry) {
    Set<CacheEntry> entries = entryMap.get(key);
    if (entries == null) {
      return false;
    }
    boolean deleted = entries.remove(entry);
    if (!deleted) {
      return false;
    }
    if (entries.isEmpty()) {
      entryMap.remove(key);
    }
    return true;
  }

  @VisibleForTesting
  public static void cleanupInstance() {
    // This should only ever be called in testing scenarios.
    // There should not be any other users of the cache or its entries or this may mess up cleanup.
    if (inited.get()) {
      if (instance.invalidationPollFuture != null) {
        instance.invalidationPollFuture.cancel(true);
        instance.invalidationPollFuture = null;
      }
      instance.clear();
      instance = null;
      inited.set(false);
    }
  }

  private static ScheduledExecutorService invalidationExecutor = null;
  private static ExecutorService deletionExecutor = null;

  static {
    // Daemon threads so the executors do not keep the process alive.
    ThreadFactory threadFactory =
        new ThreadFactoryBuilder().setDaemon(true).setNameFormat("QueryResultsCache %d").build();
    invalidationExecutor = Executors.newSingleThreadScheduledExecutor(threadFactory);
    deletionExecutor = Executors.newSingleThreadExecutor(threadFactory);
  }

  /** Schedules removal of the entry after the configured max entry lifetime, if one is set. */
  private void scheduleEntryInvalidation(final CacheEntry entry) {
    if (maxEntryLifetime >= 0) {
      // Schedule task to invalidate cache entry and remove from lookup.
      ScheduledFuture<?> future = invalidationExecutor.schedule(new Runnable() {
        @Override
        public void run() {
          removeEntry(entry);
        }
      }, maxEntryLifetime, TimeUnit.MILLISECONDS);
      entry.invalidationFuture = future;
    }
  }

  /** Asynchronously deletes the entry's cached result files. Entry must already be INVALID. */
  private static void cleanupEntry(final CacheEntry entry) {
    Preconditions.checkState(entry.getStatus() == CacheEntryStatus.INVALID);
    final HiveConf conf = getInstance().conf;

    // The zero-rows placeholder path has no backing files and needs no deletion.
    if (entry.cachedResultsPath != null &&
        !getInstance().zeroRowsPath.equals(entry.cachedResultsPath)) {
      deletionExecutor.execute(new Runnable() {
        @Override
        public void run() {
          Path path = entry.cachedResultsPath;
          LOG.info("Cache directory cleanup: deleting {}", path);
          try {
            FileSystem fs = entry.cachedResultsPath.getFileSystem(getInstance().conf);
            fs.delete(entry.cachedResultsPath, true);
          } catch (Exception err) {
            LOG.error("Error while trying to delete " + path, err);
          }
        }
      });
    }
  }

  public static void incrementMetric(String name, long count) {
    Metrics metrics = MetricsFactory.getInstance();
    if (metrics != null) {
      metrics.incrementCounter(name, count);
    }
  }

  public static void decrementMetric(String name, long count) {
    Metrics metrics = MetricsFactory.getInstance();
    if (metrics != null) {
      metrics.decrementCounter(name, count);
    }
  }

  public static void incrementMetric(String name) {
    incrementMetric(name, 1);
  }

  public static void decrementMetric(String name) {
    decrementMetric(name, 1);
  }

  /** Registers gauges exposing the configured max size and current size of the cache. */
  private static void registerMetrics(Metrics metrics, final QueryResultsCache cache) {
    MetricsVariable<Long> maxCacheSize = new MetricsVariable<Long>() {
      @Override
      public Long getValue() {
        return cache.maxCacheSize;
      }
    };

    MetricsVariable<Long> curCacheSize = new MetricsVariable<Long>() {
      @Override
      public Long getValue() {
        return cache.cacheSize;
      }
    };

    metrics.addGauge(MetricsConstant.QC_MAX_SIZE, maxCacheSize);
    metrics.addGauge(MetricsConstant.QC_CURRENT_SIZE, curCacheSize);
  }

  // EventConsumer to invalidate cache entries based on metastore notification events (alter table, add partition, etc).
  public static class InvalidationEventConsumer implements EventConsumer {
    Configuration conf;

    @Override
    public Configuration getConf() {
      return conf;
    }

    @Override
    public void setConf(Configuration conf) {
      this.conf = conf;
    }

    @Override
    public void accept(NotificationEvent event) {
      String dbName;
      String tableName;

      // Only table-affecting events are of interest; everything else is ignored.
      switch (event.getEventType()) {
      case MessageBuilder.ADD_PARTITION_EVENT:
      case MessageBuilder.ALTER_PARTITION_EVENT:
      case MessageBuilder.ALTER_PARTITIONS_EVENT:
      case MessageBuilder.DROP_PARTITION_EVENT:
      case MessageBuilder.ALTER_TABLE_EVENT:
      case MessageBuilder.DROP_TABLE_EVENT:
      case MessageBuilder.INSERT_EVENT:
        dbName = event.getDbName();
        tableName = event.getTableName();
        break;
      default:
        return;
      }

      if (dbName == null || tableName == null) {
        LOG.info("Possibly malformed notification event, missing db or table name: {}", event);
        return;
      }

      LOG.debug("Handling event {} on table {}.{}", event.getEventType(), dbName, tableName);

      QueryResultsCache cache = QueryResultsCache.getInstance();
      if (cache != null) {
        // Event time is converted from seconds to milliseconds to match query times.
        long eventTime = event.getEventTime() * 1000L;
        cache.notifyTableChanged(dbName, tableName, eventTime);
      } else {
        LOG.debug("Cache not instantiated, skipping event on {}.{}", dbName, tableName);
      }
    }
  }
}
apache/samza
34,806
samza-core/src/test/java/org/apache/samza/container/grouper/stream/TestGroupBySystemStreamPartitionWithGrouperProxy.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.container.grouper.stream; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.samza.Partition; import org.apache.samza.config.MapConfig; import org.apache.samza.config.StorageConfig; import org.apache.samza.container.TaskName; import org.apache.samza.container.grouper.task.GrouperMetadata; import org.apache.samza.container.grouper.task.GrouperMetadataImpl; import org.apache.samza.system.SystemStreamPartition; import org.junit.Assert; import org.junit.Test; public class TestGroupBySystemStreamPartitionWithGrouperProxy { @Test public void testSingleStreamRepartitioning() { Map<TaskName, List<SystemStreamPartition>> prevGroupingWithSingleStream = ImmutableMap.<TaskName, List<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), 
ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .build(); Set<SystemStreamPartition> currSsps = IntStream.range(0, 8) .mapToObj(partitionId -> new SystemStreamPartition("kafka", "PVE", new Partition(partitionId))) .collect(Collectors.toSet()); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateful = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)), new SystemStreamPartition("kafka", "PVE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)), new SystemStreamPartition("kafka", "PVE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(7)), new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)), new SystemStreamPartition("kafka", "PVE", new Partition(6)))) .build(); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateless = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new 
SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(6)))) .build(); // SSPGrouperProxy for stateful job SSPGrouperProxy groupBySystemStreamPartition = buildSspGrouperProxy(true); GrouperMetadata grouperMetadata = new GrouperMetadataImpl(new HashMap<>(), new HashMap<>(), prevGroupingWithSingleStream, new HashMap<>()); Map<TaskName, Set<SystemStreamPartition>> finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateful, finalGrouping); // SSPGrouperProxy for stateless job groupBySystemStreamPartition = buildSspGrouperProxy(false); finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateless, finalGrouping); } @Test public void testMultipleStreamsWithSingleStreamExpansionAndNewStream() { Map<TaskName, List<SystemStreamPartition>> prevGroupingWithMultipleStreams = ImmutableMap.<TaskName, List<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), 
ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(3)))) .build(); Set<SystemStreamPartition> currSsps = IntStream.range(0, 8) .mapToObj(partitionId -> new SystemStreamPartition("kafka", "PVE", new Partition(partitionId))) .collect(Collectors.toSet()); IntStream.range(0, 8).forEach(partitionId -> currSsps.add(new SystemStreamPartition("kafka", "BOB", new Partition(partitionId)))); IntStream.range(0, 4).forEach(partitionId -> currSsps.add(new SystemStreamPartition("kafka", "URE", new Partition(partitionId)))); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateful = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, BOB, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 6]"), ImmutableSet.of(new 
SystemStreamPartition("kafka", "BOB", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)), new SystemStreamPartition("kafka", "PVE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)), new SystemStreamPartition("kafka", "PVE", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)), new SystemStreamPartition("kafka", "PVE", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)), new SystemStreamPartition("kafka", "PVE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(0)))) .build(); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateless = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new 
TaskName("SystemStreamPartition [kafka, BOB, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 6]"), 
ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(0)))) .build(); // SSPGrouperProxy for stateful job SSPGrouperProxy groupBySystemStreamPartition = buildSspGrouperProxy(true); GrouperMetadata grouperMetadata = new GrouperMetadataImpl(new HashMap<>(), new HashMap<>(), prevGroupingWithMultipleStreams, new HashMap<>()); Map<TaskName, Set<SystemStreamPartition>> finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateful, finalGrouping); // SSPGrouperProxy for stateless job groupBySystemStreamPartition = buildSspGrouperProxy(false); finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateless, finalGrouping); } @Test public void testRemovalOfPreviousStreamsAndThenAddNewStream() { Map<TaskName, List<SystemStreamPartition>> prevGroupingWithMultipleStreams = ImmutableMap.<TaskName, List<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, 
PVE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .build(); Set<SystemStreamPartition> currSsps = IntStream.range(0, 8) .mapToObj(partitionId -> new SystemStreamPartition("kafka", "BOB", new Partition(partitionId))) .collect(Collectors.toSet()); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStatefulAndStateless = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, BOB, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(4)))) .put(new TaskName("SystemStreamPartition 
[kafka, BOB, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(6)))) .build(); // SSPGrouperProxy for stateful job SSPGrouperProxy groupBySystemStreamPartition = buildSspGrouperProxy(true); GrouperMetadata grouperMetadata = new GrouperMetadataImpl(new HashMap<>(), new HashMap<>(), prevGroupingWithMultipleStreams, new HashMap<>()); Map<TaskName, Set<SystemStreamPartition>> finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStatefulAndStateless, finalGrouping); // SSPGrouperProxy for stateless job groupBySystemStreamPartition = buildSspGrouperProxy(false); finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStatefulAndStateless, finalGrouping); } @Test public void testRemovalAndAdditionOfStreamsWithExpansion() { Map<TaskName, List<SystemStreamPartition>> prevGroupingWithMultipleStreams = ImmutableMap.<TaskName, List<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(1)))) 
.put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(3)))) .build(); Set<SystemStreamPartition> currSsps = IntStream.range(0, 8) .mapToObj(partitionId -> new SystemStreamPartition("kafka", "PVE", new Partition(partitionId))) .collect(Collectors.toSet()); IntStream.range(0, 8).forEach(partitionId -> currSsps.add(new SystemStreamPartition("kafka", "BOB", new Partition(partitionId)))); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateful = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(5)), new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(6)), new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(7)), new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(4)), new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, 
BOB, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(0)))) .build(); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateless = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", 
"BOB", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(0)))) .build(); // SSPGrouperProxy for stateful job SSPGrouperProxy groupBySystemStreamPartition = buildSspGrouperProxy(true); GrouperMetadata grouperMetadata = new GrouperMetadataImpl(new HashMap<>(), new HashMap<>(), prevGroupingWithMultipleStreams, new HashMap<>()); Map<TaskName, Set<SystemStreamPartition>> finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateful, finalGrouping); // SSPGrouperProxy for stateless job groupBySystemStreamPartition = buildSspGrouperProxy(false); finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateless, finalGrouping); } @Test public void testMultipleStreamExpansionWithNewStreams() { Map<TaskName, List<SystemStreamPartition>> prevGroupingWithMultipleStreams = ImmutableMap.<TaskName, List<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableList.of(new 
SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 3]"), ImmutableList.of(new SystemStreamPartition("kafka", "URE", new Partition(3)))) .build(); Set<SystemStreamPartition> currSsps = IntStream.range(0, 8) .mapToObj(partitionId -> new SystemStreamPartition("kafka", "PVE", new Partition(partitionId))) .collect(Collectors.toSet()); IntStream.range(0, 8).forEach(partitionId -> currSsps.add(new SystemStreamPartition("kafka", "BOB", new Partition(partitionId)))); IntStream.range(0, 8).forEach(partitionId -> currSsps.add(new SystemStreamPartition("kafka", "URE", new Partition(partitionId)))); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateful = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, BOB, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 3]"), ImmutableSet.of(new 
SystemStreamPartition("kafka", "BOB", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)), new SystemStreamPartition("kafka", "PVE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)), new SystemStreamPartition("kafka", "PVE", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(3)), new SystemStreamPartition("kafka", "PVE", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)), new SystemStreamPartition("kafka", "PVE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(1)), new SystemStreamPartition("kafka", "URE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(2)), new SystemStreamPartition("kafka", "URE", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 
3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(3)), new SystemStreamPartition("kafka", "URE", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(0)), new SystemStreamPartition("kafka", "URE", new Partition(4)))) .build(); Map<TaskName, Set<SystemStreamPartition>> expectedGroupingForStateless = ImmutableMap.<TaskName, Set<SystemStreamPartition>>builder() .put(new TaskName("SystemStreamPartition [kafka, BOB, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, BOB, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "BOB", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new 
Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, PVE, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "PVE", new Partition(7)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 1]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(1)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 2]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(2)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 3]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(3)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 0]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(0)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 4]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(4)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 5]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(5)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 6]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(6)))) .put(new TaskName("SystemStreamPartition [kafka, URE, 7]"), ImmutableSet.of(new SystemStreamPartition("kafka", "URE", new Partition(7)))) .build(); // SSPGrouperProxy for stateful job SSPGrouperProxy groupBySystemStreamPartition = buildSspGrouperProxy(true); GrouperMetadata grouperMetadata = new GrouperMetadataImpl(new 
HashMap<>(), new HashMap<>(), prevGroupingWithMultipleStreams, new HashMap<>()); Map<TaskName, Set<SystemStreamPartition>> finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateful, finalGrouping); // SSPGrouperProxy for stateless job groupBySystemStreamPartition = buildSspGrouperProxy(false); finalGrouping = groupBySystemStreamPartition.group(currSsps, grouperMetadata); Assert.assertEquals(expectedGroupingForStateless, finalGrouping); } private static SSPGrouperProxy buildSspGrouperProxy(boolean forStatefulJob) { HashMap<String, String> configMap = new HashMap<>(); if (forStatefulJob) { configMap.put(String.format(StorageConfig.FACTORY, "test-store"), "TestStoreFactory"); } return new SSPGrouperProxy(new MapConfig(configMap), new GroupBySystemStreamPartition(new MapConfig())); } }
openjdk/jdk8
35,424
jdk/src/share/classes/sun/awt/image/PNGImageDecoder.java
/* * Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.awt.image; import java.io.*; import java.util.*; import java.util.zip.*; import java.awt.image.*; import java.awt.Color; /** PNG - Portable Network Graphics - image file reader. See <a href=http://www.ietf.org/rfc/rfc2083.txt>RFC2083</a> for details. 
*/ /* this is changed public class PNGImageDecoder extends FilterInputStream implements Runnable { */ public class PNGImageDecoder extends ImageDecoder { private static final int GRAY=0; private static final int PALETTE=1; private static final int COLOR=2; private static final int ALPHA=4; private static final int bKGDChunk = 0x624B4744; private static final int cHRMChunk = 0x6348524D; private static final int gAMAChunk = 0x67414D41; private static final int hISTChunk = 0x68495354; private static final int IDATChunk = 0x49444154; private static final int IENDChunk = 0x49454E44; private static final int IHDRChunk = 0x49484452; private static final int PLTEChunk = 0x504C5445; private static final int pHYsChunk = 0x70485973; private static final int sBITChunk = 0x73424954; private static final int tEXtChunk = 0x74455874; private static final int tIMEChunk = 0x74494D45; private static final int tRNSChunk = 0x74524E53; private static final int zTXtChunk = 0x7A545874; private int width; private int height; private int bitDepth; private int colorType; private int compressionMethod; private int filterMethod; private int interlaceMethod; private int gamma = 100000; private java.util.Hashtable properties; /* this is not needed ImageConsumer target; */ private ColorModel cm; private byte[] red_map, green_map, blue_map, alpha_map; private int transparentPixel = -1; private byte[] transparentPixel_16 = null; // we need 6 bytes to store 16bpp value private static ColorModel greyModels[] = new ColorModel[4]; /* this is not needed PNGImageDecoder next; */ private void property(String key,Object value) { if(value==null) return; if(properties==null) properties=new java.util.Hashtable(); properties.put(key,value); } private void property(String key,float value) { property(key,new Float(value)); } private final void pngassert(boolean b) throws IOException { if(!b) { PNGException e = new PNGException("Broken file"); e.printStackTrace(); throw e; } } protected boolean handleChunk(int 
key, byte[] buf, int st, int len) throws IOException { switch(key) { case bKGDChunk: Color c = null; switch(colorType) { case COLOR: case COLOR|ALPHA: pngassert(len==6); c = new Color(buf[st]&0xff,buf[st+2]&0xff,buf[st+4]&0xff); break; case COLOR|PALETTE: case COLOR|PALETTE|ALPHA: pngassert(len==1); int ix = buf[st]&0xFF; pngassert(red_map!=null && ix<red_map.length); c = new Color(red_map[ix]&0xff,green_map[ix]&0xff,blue_map[ix]&0xff); break; case GRAY: case GRAY|ALPHA: pngassert(len==2); int t = buf[st]&0xFF; c = new Color(t,t,t); break; } if(c!=null) property("background",c); break; case cHRMChunk: property("chromaticities", new Chromaticities( getInt(st), getInt(st+4), getInt(st+8), getInt(st+12), getInt(st+16), getInt(st+20), getInt(st+24), getInt(st+28))); break; case gAMAChunk: if(len!=4) throw new PNGException("bogus gAMA"); gamma = getInt(st); if(gamma!=100000) property("gamma",gamma/100000.0f); break; case hISTChunk: break; case IDATChunk: return false; case IENDChunk: break; case IHDRChunk: if(len!=13 ||(width = getInt(st))==0 ||(height = getInt(st+4))==0 ) throw new PNGException("bogus IHDR"); bitDepth = getByte(st+8); colorType = getByte(st+9); compressionMethod = getByte(st+10); filterMethod = getByte(st+11); interlaceMethod = getByte(st+12); /* this is not needed if(target!=null) target.setDimensions(width,height); */ break; case PLTEChunk: { int tsize = len/3; red_map = new byte[tsize]; green_map = new byte[tsize]; blue_map = new byte[tsize]; for(int i=0,j=st; i<tsize; i++, j+=3) { red_map[i] = buf[j]; green_map[i] = buf[j+1]; blue_map[i] = buf[j+2]; } } break; case pHYsChunk: break; case sBITChunk: break; case tEXtChunk: int klen = 0; while(klen<len && buf[st+klen]!=0) klen++; if(klen<len) { String tkey = new String(buf,st,klen); String tvalue = new String(buf,st+klen+1,len-klen-1); property(tkey,tvalue); } break; case tIMEChunk: property("modtime",new GregorianCalendar( getShort(st+0), getByte(st+2)-1, getByte(st+3), getByte(st+4), getByte(st+5), 
getByte(st+6)).getTime()); break; case tRNSChunk: switch(colorType) { case PALETTE|COLOR: case PALETTE|COLOR|ALPHA: int alen = len; if(red_map!=null) alen = red_map.length; alpha_map = new byte[alen]; System.arraycopy(buf,st,alpha_map,0,len<alen ? len : alen); while (--alen>=len) alpha_map[alen] = (byte)0xFF; break; case COLOR: // doesn't deal with 16 bit colors properly case COLOR|ALPHA: // doesn't deal with 16 bit colors properly pngassert(len==6); if (bitDepth == 16) { transparentPixel_16 = new byte[6]; for (int i = 0; i < 6; i++) { transparentPixel_16[i] = (byte)getByte(st + i); } } else { transparentPixel = ((getShort(st + 0)&0xFF)<<16) | ((getShort(st + 2)&0xFF)<< 8) | ((getShort(st + 4)&0xFF) ); } break; case GRAY: // doesn't deal with 16 bit colors properly case GRAY|ALPHA: // doesn't deal with 16 bit colors properly pngassert(len==2); /* REMIND: Discarding the LSB for 16 bit depth here * means that the all pixels which match the MSB * will be treated as transparent. */ int t = getShort(st); t = 0xFF & ((bitDepth == 16) ? 
(t >> 8) : t); transparentPixel = (t<<16) | (t<< 8) | t; break; } break; case zTXtChunk: break; } return true; } public class PNGException extends IOException { PNGException(String s) { super(s); } } /* this is changed public void run() { */ public void produceImage() throws IOException, ImageFormatException { /* this is not needed ImageConsumer t = target; if(t!=null) try { */ try { for(int i=0; i<signature.length; i++) if((signature[i]&0xFF)!=underlyingInputStream.read()) throw new PNGException("Chunk signature mismatch"); InputStream is = new BufferedInputStream(new InflaterInputStream(inputStream,new Inflater())); getData(); byte[] bPixels = null; int[] wPixels = null; int pixSize = width; int rowStride; int logDepth = 0; switch(bitDepth) { case 1: logDepth = 0; break; case 2: logDepth = 1; break; case 4: logDepth = 2; break; case 8: logDepth = 3; break; case 16: logDepth = 4; break; default: throw new PNGException("invalid depth"); } if(interlaceMethod!=0) {pixSize *= height;rowStride=width;} else rowStride = 0; int combinedType = colorType|(bitDepth<<3); int bitMask = (1<<(bitDepth>=8?8:bitDepth))-1; //Figure out the color model switch(colorType) { case COLOR|PALETTE: case COLOR|PALETTE|ALPHA: if(red_map==null) throw new PNGException("palette expected"); if(alpha_map==null) cm = new IndexColorModel(bitDepth,red_map.length, red_map,green_map,blue_map); else cm = new IndexColorModel(bitDepth,red_map.length, red_map,green_map,blue_map,alpha_map); bPixels = new byte[pixSize]; break; case GRAY: { int llog = logDepth>=4 ? 
3 : logDepth; if((cm=greyModels[llog]) == null) { int size = 1<<(1<<llog); byte ramp[] = new byte[size]; for(int i = 0; i<size; i++) ramp[i] = (byte)(255*i/(size-1)); if (transparentPixel == -1) { cm = new IndexColorModel(bitDepth,ramp.length,ramp,ramp,ramp); } else { cm = new IndexColorModel(bitDepth,ramp.length,ramp,ramp,ramp, (transparentPixel & 0xFF)); } greyModels[llog] = cm; } } bPixels = new byte[pixSize]; break; case COLOR: case COLOR|ALPHA: case GRAY|ALPHA: cm = ColorModel.getRGBdefault(); wPixels = new int[pixSize]; break; default: throw new PNGException("invalid color type"); } /* this is going to be set in the pixel store t.setColorModel(cm); t.setHints(interlaceMethod !=0 ? ImageConsumer.TOPDOWNLEFTRIGHT | ImageConsumer.COMPLETESCANLINES : ImageConsumer.TOPDOWNLEFTRIGHT | ImageConsumer.COMPLETESCANLINES | ImageConsumer.SINGLEPASS | ImageConsumer.SINGLEFRAME); */ // code added to make it work with ImageDecoder architecture setDimensions(width, height); setColorModel(cm); int flags = (interlaceMethod !=0 ? ImageConsumer.TOPDOWNLEFTRIGHT | ImageConsumer.COMPLETESCANLINES : ImageConsumer.TOPDOWNLEFTRIGHT | ImageConsumer.COMPLETESCANLINES | ImageConsumer.SINGLEPASS | ImageConsumer.SINGLEFRAME); setHints(flags); headerComplete(); // end of adding int samplesPerPixel = ((colorType&PALETTE)!=0 ? 1 : ((colorType&COLOR)!=0 ? 3 : 1)+((colorType&ALPHA)!=0?1:0)); int bitsPerPixel = samplesPerPixel*bitDepth; int bytesPerPixel = (bitsPerPixel+7)>>3; int pass, passLimit; if(interlaceMethod==0) { pass = -1; passLimit = 0; } else { pass = 0; passLimit = 7; } // These loops are far from being tuned. They're this way to make them easy to // debug. Tuning comes later. /* code changed. 
target not needed here while(++pass<=passLimit && (t=target)!=null) { */ while(++pass<=passLimit) { int row = startingRow[pass]; int rowInc = rowIncrement[pass]; int colInc = colIncrement[pass]; int bWidth = blockWidth[pass]; int bHeight = blockHeight[pass]; int sCol = startingCol[pass]; int rowPixelWidth = (width-sCol+(colInc-1))/colInc; int rowByteWidth = ((rowPixelWidth*bitsPerPixel)+7)>>3; if(rowByteWidth==0) continue; int pixelBufferInc = interlaceMethod==0 ? rowInc*width : 0; int rowOffset = rowStride*row; boolean firstRow = true; byte[] rowByteBuffer = new byte[rowByteWidth]; byte[] prevRowByteBuffer = new byte[rowByteWidth]; /* code changed. target not needed here while (row < height && (t=target)!=null) { */ while (row < height) { int rowFilter = is.read(); for (int rowFillPos=0;rowFillPos<rowByteWidth; ) { int n = is.read(rowByteBuffer,rowFillPos,rowByteWidth-rowFillPos); if(n<=0) throw new PNGException("missing data"); rowFillPos+=n; } filterRow(rowByteBuffer, firstRow ? null : prevRowByteBuffer, rowFilter, rowByteWidth, bytesPerPixel); int col = sCol; int spos=0; int pixel = 0; while (col < width) { if(wPixels !=null) { switch(combinedType) { case COLOR|ALPHA|(8<<3): wPixels[col+rowOffset] = ((rowByteBuffer[spos ]&0xFF)<<16) | ((rowByteBuffer[spos+1]&0xFF)<< 8) | ((rowByteBuffer[spos+2]&0xFF) ) | ((rowByteBuffer[spos+3]&0xFF)<<24); spos+=4; break; case COLOR|ALPHA|(16<<3): wPixels[col+rowOffset] = ((rowByteBuffer[spos ]&0xFF)<<16) | ((rowByteBuffer[spos+2]&0xFF)<< 8) | ((rowByteBuffer[spos+4]&0xFF) ) | ((rowByteBuffer[spos+6]&0xFF)<<24); spos+=8; break; case COLOR|(8<<3): pixel = ((rowByteBuffer[spos ]&0xFF)<<16) | ((rowByteBuffer[spos+1]&0xFF)<< 8) | ((rowByteBuffer[spos+2]&0xFF) ); if (pixel != transparentPixel) { pixel |= 0xff000000; } wPixels[col+rowOffset] = pixel; spos+=3; break; case COLOR|(16<<3): pixel = ((rowByteBuffer[spos ]&0xFF)<<16) | ((rowByteBuffer[spos+2]&0xFF)<< 8) | ((rowByteBuffer[spos+4]&0xFF) ); boolean isTransparent = 
(transparentPixel_16 != null); for (int i = 0; isTransparent && (i < 6); i++) { isTransparent &= (rowByteBuffer[spos + i] & 0xFF) == (transparentPixel_16[i] & 0xFF); } if (!isTransparent) { pixel |= 0xff000000; } wPixels[col+rowOffset] = pixel; spos+=6; break; case GRAY|ALPHA|(8<<3): { int tx = rowByteBuffer[spos]&0xFF; wPixels[col+rowOffset] = (tx<<16)|(tx<<8)|tx |((rowByteBuffer[spos+1]&0xFF)<<24); } spos+=2; break; case GRAY|ALPHA|(16<<3): { int tx = rowByteBuffer[spos]&0xFF; wPixels[col+rowOffset] = (tx<<16)|(tx<<8)|tx |((rowByteBuffer[spos+2]&0xFF)<<24); } spos+=4; break; default: throw new PNGException("illegal type/depth"); } } else switch(bitDepth) { case 1: bPixels[col+rowOffset] = (byte)((rowByteBuffer[spos>>3]>>(7-(spos&7)))&1); spos++; break; case 2: bPixels[col+rowOffset] = (byte)((rowByteBuffer[spos>>2]>>((3-(spos&3))*2))&3); spos++; break; case 4: bPixels[col+rowOffset] = (byte)((rowByteBuffer[spos>>1]>>((1-(spos&1))*4))&15); spos++; break; case 8: bPixels[col+rowOffset] = rowByteBuffer[spos++]; break; case 16: bPixels[col+rowOffset] = rowByteBuffer[spos]; spos+=2; break; default: throw new PNGException("illegal type/depth"); } /*visit (row, col, min (bHeight, height - row), min (bWidth, width - col)); */ col += colInc; } if(interlaceMethod==0) if(wPixels!=null) { /* code changed. target not needed here t.setPixels(0,row,width,1,cm,wPixels,0,width); */ // code added to make it work with ImageDecoder arch sendPixels(0,row,width,1,wPixels,0,width); // end of adding } else { /* code changed. target not needed here t.setPixels(0,row,width,1,cm,bPixels,0,width); */ // code added to make it work with ImageDecoder arch sendPixels(0,row,width,1,bPixels,0,width); //end of adding } row += rowInc; rowOffset += rowInc*rowStride; byte[] T = rowByteBuffer; rowByteBuffer = prevRowByteBuffer; prevRowByteBuffer = T; firstRow = false; } if(interlaceMethod!=0) if(wPixels!=null) { /* code changed. 
target not needed here
                      t.setPixels(0,0,width,height,cm,wPixels,0,width);
                      */
                      // code added to make it work with ImageDecoder arch
                      sendPixels(0,0,width,height,wPixels,0,width);
                      //end of adding
                  } else {
                      /* code changed. target not needed here
                      t.setPixels(0,0,width,height,cm,bPixels,0,width);
                      */
                      // code added to make it work with ImageDecoder arch
                      sendPixels(0,0,width,height,bPixels,0,width);
                      //end of adding
                  }
          }
          /* Here, the function "visit(row,column,height,width)" obtains the
             next transmitted pixel and paints a rectangle of the specified
             height and width, whose upper-left corner is at the specified row
             and column, using the color indicated by the pixel. Note that row
             and column are measured from 0,0 at the upper left corner. */
          /* code not needed, don't deal with target
          if((t=target)!=null) {
              if(properties!=null) t.setProperties(properties);
              t.imageComplete(ImageConsumer.STATICIMAGEDONE);
          */
          imageComplete(ImageConsumer.STATICIMAGEDONE, true);
          /* code not needed
          }
          is.close();
          */
      } catch(IOException e) {
          if(!aborted) {
              /* code not needed
              if((t=target)!=null) {
                  PNGEncoder.prChunk(e.toString(),inbuf,pos,limit-pos,true);
              */
              // Record the failure as a property, report the error downstream,
              // then rethrow so the caller still sees the original exception.
              property("error", e);
              /* code not needed
              t.setProperties(properties);
              t.imageComplete(ImageConsumer.IMAGEERROR|ImageConsumer.STATICIMAGEDONE);
              */
              imageComplete(ImageConsumer.IMAGEERROR|ImageConsumer.STATICIMAGEDONE, true);
              throw e;
          }
      } finally {
          try { close(); } catch(Throwable e){}
          /* code not needed
          target = null;
          endTurn();
          */
      }
  }

  // Forwards a block of int (ARGB) pixels to the consumer; flags abort on failure.
  private boolean sendPixels(int x, int y, int w, int h, int[] pixels,
                             int offset, int pixlength) {
      int count = setPixels(x, y, w, h, cm, pixels, offset, pixlength);
      if (count <= 0) {
          aborted = true;
      }
      return !aborted;
  }

  // Forwards a block of indexed/gray byte pixels; flags abort on failure.
  private boolean sendPixels(int x, int y, int w, int h, byte[] pixels,
                             int offset, int pixlength) {
      int count = setPixels(x, y, w, h, cm, pixels, offset, pixlength);
      if (count <= 0) {
          aborted = true;
      }
      return !aborted;
  }

  // Undoes the per-scanline PNG filter in place (filter types 0-4:
  // None, Sub, Up, Average, Paeth -- see the PNG spec, filtering section).
  // prevRow == null marks the first row of a pass.
  private void filterRow(byte rowByteBuffer[], byte[] prevRow, int rowFilter, int
          rowByteWidth, int bytesPerSample) throws IOException {
      int x = 0;
      switch (rowFilter) {
        case 0: // None
          break;
        case 1: // Sub: add the byte bytesPerSample to the left.
          for (x = bytesPerSample; x < rowByteWidth; x++)
              rowByteBuffer[x] += rowByteBuffer[x - bytesPerSample];
          break;
        case 2: // Up: add the byte directly above.
          if (prevRow != null)
              for ( ; x < rowByteWidth; x++)
                  rowByteBuffer[x] += prevRow[x];
          break;
        case 3: // Average of left and above (floor).
          if (prevRow != null) {
              for ( ; x < bytesPerSample; x++)
                  rowByteBuffer[x] += (0xff & prevRow[x])>>1;
              for ( ; x < rowByteWidth; x++)
                  rowByteBuffer[x] += ((prevRow[x]&0xFF)
                                       + (rowByteBuffer[x - bytesPerSample]&0xFF))>>1;
          } else
              for (x = bytesPerSample; x < rowByteWidth; x++)
                  rowByteBuffer[x] += (rowByteBuffer[x - bytesPerSample]&0xFF)>>1;
          break;
        case 4: // Paeth: predictor nearest to p = left + up - upper-left.
          if (prevRow != null) {
              for ( ; x < bytesPerSample; x++)
                  rowByteBuffer[x] += prevRow[x];
              for ( ; x < rowByteWidth; x++) {
                  int a, b, c, p, pa, pb, pc, rval;
                  a = rowByteBuffer[x - bytesPerSample]&0xFF;
                  b = prevRow[x]&0xFF;
                  c = prevRow[x - bytesPerSample]&0xFF;
                  p = a + b - c;
                  pa = p > a ? p - a : a - p;
                  pb = p > b ? p - b : b - p;
                  pc = p > c ? p - c : c - p;
                  rowByteBuffer[x] += (pa <= pb) && (pa <= pc) ? a
                                      : pb <= pc ?
b : c; } } else for (x = bytesPerSample; x < rowByteWidth; x++) rowByteBuffer[x] += rowByteBuffer[x - bytesPerSample]; break; default: throw new PNGException("Illegal filter"); } } private static final byte[] startingRow = { 0, 0, 0, 4, 0, 2, 0, 1 }; private static final byte[] startingCol = { 0, 0, 4, 0, 2, 0, 1, 0 }; private static final byte[] rowIncrement = { 1, 8, 8, 8, 4, 4, 2, 2 }; private static final byte[] colIncrement = { 1, 8, 8, 4, 4, 2, 2, 1 }; private static final byte[] blockHeight = { 1, 8, 8, 4, 4, 2, 2, 1 }; private static final byte[] blockWidth = { 1, 8, 4, 4, 2, 2, 1, 1 }; //abstract public class ChunkReader extends FilterInputStream { int pos, limit; int chunkStart; int chunkKey, chunkLength, chunkCRC; boolean seenEOF; private static final byte[] signature = { (byte) 137, (byte) 80, (byte) 78, (byte) 71, (byte) 13, (byte) 10, (byte) 26, (byte) 10 }; PNGFilterInputStream inputStream; InputStream underlyingInputStream; /* code changed public PNGImageDecoder(InputStream in, ImageConsumer t) throws IOException { */ public PNGImageDecoder(InputStreamImageSource src, InputStream input) throws IOException { // code added super(src, input); inputStream = new PNGFilterInputStream(this, input); underlyingInputStream = inputStream.underlyingInputStream; // end of adding /* code changed super(in); target = t; waitTurn(); new Thread(this).start(); */ } /* code changed to make it work with ImageDecoder architecture static int ThreadLimit = 10; private synchronized static void waitTurn() { try { while(ThreadLimit<=0) PNGImageDecoder.class.wait(1000); } catch(InterruptedException e){} ThreadLimit--; } private synchronized static void endTurn() { if(ThreadLimit<=0) PNGImageDecoder.class.notify(); ThreadLimit++; } */ byte[] inbuf = new byte[4096]; private void fill() throws IOException { if(!seenEOF) { if(pos>0 && pos<limit) { System.arraycopy(inbuf,pos,inbuf,0,limit-pos); limit = limit-pos; pos = 0; } else if(pos>=limit) { pos = 0; limit = 0; } int bsize = 
inbuf.length; while(limit<bsize) { int n = underlyingInputStream.read(inbuf,limit,bsize-limit); if(n<=0) { seenEOF=true; break; } limit += n; } } } private boolean need(int n) throws IOException { if(limit-pos>=n) return true; fill(); if(limit-pos>=n) return true; if(seenEOF) return false; byte nin[] = new byte[n+100]; System.arraycopy(inbuf,pos,nin,0,limit-pos); limit = limit-pos; pos = 0; inbuf = nin; fill(); return limit-pos>=n; } private final int getInt(int pos) { return ((inbuf[pos ]&0xFF)<<24) | ((inbuf[pos+1]&0xFF)<<16) | ((inbuf[pos+2]&0xFF)<< 8) | ((inbuf[pos+3]&0xFF) ); } private final int getShort(int pos) { return (short)(((inbuf[pos ]&0xFF)<<8) | ((inbuf[pos+1]&0xFF) )); } private final int getByte(int pos) { return inbuf[pos]&0xFF; } private final boolean getChunk() throws IOException { chunkLength = 0; if (!need(8)) return false; chunkLength = getInt(pos); chunkKey = getInt(pos+4); if(chunkLength<0) throw new PNGException("bogus length: "+chunkLength); if (!need(chunkLength+12)) return false; chunkCRC = getInt(pos+8+chunkLength); chunkStart = pos+8; int calcCRC = crc(inbuf,pos+4,chunkLength+4); if(chunkCRC!=calcCRC && checkCRC) throw new PNGException("crc corruption"); pos+=chunkLength+12; return true; } private void readAll() throws IOException { while(getChunk()) handleChunk(chunkKey,inbuf,chunkStart,chunkLength); } boolean getData() throws IOException { while(chunkLength==0 && getChunk()) if(handleChunk(chunkKey,inbuf,chunkStart,chunkLength)) chunkLength = 0; return chunkLength>0; } //abstract protected boolean handleChunk(int key, byte[] buf, int st, int len) // throws IOException; private static boolean checkCRC = true; public static boolean getCheckCRC() { return checkCRC; } public static void setCheckCRC(boolean c) { checkCRC = c; } protected void wrc(int c) { c = c&0xFF; if(c<=' '||c>'z') c = '?'; System.out.write(c); } protected void wrk(int n) { wrc(n>>24); wrc(n>>16); wrc(n>>8); wrc(n); } public void print() { wrk(chunkKey); 
System.out.print(" "+chunkLength+"\n"); } /* Table of CRCs of all 8-bit messages. */ private static final int[] crc_table = new int[256]; /* Make the table for a fast CRC. */ static { for (int n = 0; n < 256; n++) { int c = n; for (int k = 0; k < 8; k++) if ((c & 1) != 0) c = 0xedb88320 ^ (c >>> 1); else c = c >>> 1; crc_table[n] = c; } } /* Update a running CRC with the bytes buf[0..len-1]--the CRC should be initialized to all 1's, and the transmitted value is the 1's complement of the final running CRC (see the crc() routine below)). */ static private int update_crc(int crc, byte[] buf, int offset, int len) { int c = crc; while (--len>=0) c = crc_table[(c ^ buf[offset++]) & 0xff] ^ (c >>> 8); return c; } /* Return the CRC of the bytes buf[0..len-1]. */ static private int crc(byte[] buf, int offset, int len) { return update_crc(0xffffffff, buf, offset, len) ^ 0xffffffff; } public static class Chromaticities { public float whiteX, whiteY, redX, redY, greenX, greenY, blueX, blueY; Chromaticities(int wx, int wy, int rx, int ry, int gx, int gy, int bx, int by) { whiteX = wx/100000.0f; whiteY = wy/100000.0f; redX = rx/100000.0f; redY = ry/100000.0f; greenX = gx/100000.0f; greenY = gy/100000.0f; blueX = bx/100000.0f; blueY = by/100000.0f; } public String toString() { return "Chromaticities(white="+whiteX+","+whiteY+";red="+ redX+","+redY+";green="+ greenX+","+greenY+";blue="+ blueX+","+blueY+")"; } } } // the following class are added to make it work with ImageDecoder architecture class PNGFilterInputStream extends FilterInputStream { PNGImageDecoder owner; public InputStream underlyingInputStream; public PNGFilterInputStream(PNGImageDecoder owner, InputStream is) { super(is); underlyingInputStream = in; this.owner = owner; } public int available() throws IOException { return owner.limit-owner.pos+in.available();} public boolean markSupported() { return false; } public int read() throws IOException { if(owner.chunkLength<=0) if(!owner.getData()) return -1; 
owner.chunkLength--; return owner.inbuf[owner.chunkStart++]&0xFF; } public int read(byte[] b) throws IOException{return read(b,0,b.length);} public int read(byte[] b, int st, int len) throws IOException { if(owner.chunkLength<=0) if(!owner.getData()) return -1; if(owner.chunkLength<len) len = owner.chunkLength; System.arraycopy(owner.inbuf,owner.chunkStart,b,st,len); owner.chunkLength-=len; owner.chunkStart+=len; return len; } public long skip(long n) throws IOException { int i; for(i = 0; i<n && read()>=0; i++); return i; } }
googleapis/google-cloud-java
35,083
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/ListContentResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/content.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * List content response. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListContentResponse} */ public final class ListContentResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.ListContentResponse) ListContentResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListContentResponse.newBuilder() to construct. 
private ListContentResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListContentResponse() { content_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListContentResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListContentResponse.class, com.google.cloud.dataplex.v1.ListContentResponse.Builder.class); } public static final int CONTENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.dataplex.v1.Content> content_; /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.dataplex.v1.Content> getContentList() { return content_; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.dataplex.v1.ContentOrBuilder> getContentOrBuilderList() { return content_; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ @java.lang.Override public int getContentCount() { return content_.size(); } /** * * * <pre> * Content under the given parent lake. 
* </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.Content getContent(int index) { return content_.get(index); } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.ContentOrBuilder getContentOrBuilder(int index) { return content_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < content_.size(); i++) { output.writeMessage(1, content_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < content_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, content_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.ListContentResponse)) { return super.equals(obj); } com.google.cloud.dataplex.v1.ListContentResponse other = (com.google.cloud.dataplex.v1.ListContentResponse) obj; if (!getContentList().equals(other.getContentList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getContentCount() > 0) { hash = (37 * hash) + CONTENT_FIELD_NUMBER; hash = (53 * hash) + getContentList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.dataplex.v1.ListContentResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListContentResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListContentResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataplex.v1.ListContentResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * List content response. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListContentResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.ListContentResponse) com.google.cloud.dataplex.v1.ListContentResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListContentResponse.class, com.google.cloud.dataplex.v1.ListContentResponse.Builder.class); } // Construct using com.google.cloud.dataplex.v1.ListContentResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (contentBuilder_ == null) { content_ = java.util.Collections.emptyList(); } else { content_ = null; contentBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentResponse_descriptor; } @java.lang.Override public 
com.google.cloud.dataplex.v1.ListContentResponse getDefaultInstanceForType() { return com.google.cloud.dataplex.v1.ListContentResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataplex.v1.ListContentResponse build() { com.google.cloud.dataplex.v1.ListContentResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataplex.v1.ListContentResponse buildPartial() { com.google.cloud.dataplex.v1.ListContentResponse result = new com.google.cloud.dataplex.v1.ListContentResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.dataplex.v1.ListContentResponse result) { if (contentBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { content_ = java.util.Collections.unmodifiableList(content_); bitField0_ = (bitField0_ & ~0x00000001); } result.content_ = content_; } else { result.content_ = contentBuilder_.build(); } } private void buildPartial0(com.google.cloud.dataplex.v1.ListContentResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, 
index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataplex.v1.ListContentResponse) { return mergeFrom((com.google.cloud.dataplex.v1.ListContentResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataplex.v1.ListContentResponse other) { if (other == com.google.cloud.dataplex.v1.ListContentResponse.getDefaultInstance()) return this; if (contentBuilder_ == null) { if (!other.content_.isEmpty()) { if (content_.isEmpty()) { content_ = other.content_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureContentIsMutable(); content_.addAll(other.content_); } onChanged(); } } else { if (!other.content_.isEmpty()) { if (contentBuilder_.isEmpty()) { contentBuilder_.dispose(); contentBuilder_ = null; content_ = other.content_; bitField0_ = (bitField0_ & ~0x00000001); contentBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getContentFieldBuilder() : null; } else { contentBuilder_.addAllMessages(other.content_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.dataplex.v1.Content m = input.readMessage( com.google.cloud.dataplex.v1.Content.parser(), extensionRegistry); if (contentBuilder_ == null) { ensureContentIsMutable(); content_.add(m); } else { contentBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.dataplex.v1.Content> content_ = java.util.Collections.emptyList(); private void ensureContentIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { content_ = new java.util.ArrayList<com.google.cloud.dataplex.v1.Content>(content_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dataplex.v1.Content, com.google.cloud.dataplex.v1.Content.Builder, com.google.cloud.dataplex.v1.ContentOrBuilder> contentBuilder_; /** * * * <pre> * Content under 
the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public java.util.List<com.google.cloud.dataplex.v1.Content> getContentList() { if (contentBuilder_ == null) { return java.util.Collections.unmodifiableList(content_); } else { return contentBuilder_.getMessageList(); } } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public int getContentCount() { if (contentBuilder_ == null) { return content_.size(); } else { return contentBuilder_.getCount(); } } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public com.google.cloud.dataplex.v1.Content getContent(int index) { if (contentBuilder_ == null) { return content_.get(index); } else { return contentBuilder_.getMessage(index); } } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder setContent(int index, com.google.cloud.dataplex.v1.Content value) { if (contentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContentIsMutable(); content_.set(index, value); onChanged(); } else { contentBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder setContent( int index, com.google.cloud.dataplex.v1.Content.Builder builderForValue) { if (contentBuilder_ == null) { ensureContentIsMutable(); content_.set(index, builderForValue.build()); onChanged(); } else { contentBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Content under the given parent lake. 
* </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder addContent(com.google.cloud.dataplex.v1.Content value) { if (contentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContentIsMutable(); content_.add(value); onChanged(); } else { contentBuilder_.addMessage(value); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder addContent(int index, com.google.cloud.dataplex.v1.Content value) { if (contentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContentIsMutable(); content_.add(index, value); onChanged(); } else { contentBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder addContent(com.google.cloud.dataplex.v1.Content.Builder builderForValue) { if (contentBuilder_ == null) { ensureContentIsMutable(); content_.add(builderForValue.build()); onChanged(); } else { contentBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder addContent( int index, com.google.cloud.dataplex.v1.Content.Builder builderForValue) { if (contentBuilder_ == null) { ensureContentIsMutable(); content_.add(index, builderForValue.build()); onChanged(); } else { contentBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder addAllContent( java.lang.Iterable<? 
extends com.google.cloud.dataplex.v1.Content> values) { if (contentBuilder_ == null) { ensureContentIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, content_); onChanged(); } else { contentBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder clearContent() { if (contentBuilder_ == null) { content_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { contentBuilder_.clear(); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public Builder removeContent(int index) { if (contentBuilder_ == null) { ensureContentIsMutable(); content_.remove(index); onChanged(); } else { contentBuilder_.remove(index); } return this; } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public com.google.cloud.dataplex.v1.Content.Builder getContentBuilder(int index) { return getContentFieldBuilder().getBuilder(index); } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public com.google.cloud.dataplex.v1.ContentOrBuilder getContentOrBuilder(int index) { if (contentBuilder_ == null) { return content_.get(index); } else { return contentBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public java.util.List<? 
extends com.google.cloud.dataplex.v1.ContentOrBuilder> getContentOrBuilderList() { if (contentBuilder_ != null) { return contentBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(content_); } } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public com.google.cloud.dataplex.v1.Content.Builder addContentBuilder() { return getContentFieldBuilder() .addBuilder(com.google.cloud.dataplex.v1.Content.getDefaultInstance()); } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public com.google.cloud.dataplex.v1.Content.Builder addContentBuilder(int index) { return getContentFieldBuilder() .addBuilder(index, com.google.cloud.dataplex.v1.Content.getDefaultInstance()); } /** * * * <pre> * Content under the given parent lake. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Content content = 1;</code> */ public java.util.List<com.google.cloud.dataplex.v1.Content.Builder> getContentBuilderList() { return getContentFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dataplex.v1.Content, com.google.cloud.dataplex.v1.Content.Builder, com.google.cloud.dataplex.v1.ContentOrBuilder> getContentFieldBuilder() { if (contentBuilder_ == null) { contentBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dataplex.v1.Content, com.google.cloud.dataplex.v1.Content.Builder, com.google.cloud.dataplex.v1.ContentOrBuilder>( content_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); content_ = null; } return contentBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
 */
// NOTE(review): protoc-generated code ("DO NOT EDIT") — kept token-identical, only
// reformatted and annotated. Regenerate from the dataplex content.proto instead of
// hand-editing; any manual change here is lost on the next codegen run.
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.ListContentResponse)
}

// @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.ListContentResponse)
// Singleton default instance shared by all callers; initialized eagerly at class load.
private static final com.google.cloud.dataplex.v1.ListContentResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.ListContentResponse();
}

public static com.google.cloud.dataplex.v1.ListContentResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegating to Builder.mergeFrom; partial-parse failures still attach the
// partially built message so callers can inspect what was read.
private static final com.google.protobuf.Parser<ListContentResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListContentResponse>() {
      @java.lang.Override
      public ListContentResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListContentResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListContentResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dataplex.v1.ListContentResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
apache/incubator-kie-drools
35,429
drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.drools.compiler.kie.builder.impl; import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import javax.management.ObjectName; import org.drools.base.RuleBase; import org.drools.compiler.builder.InternalKnowledgeBuilder; import org.drools.compiler.builder.impl.KnowledgeBuilderConfigurationImpl; import org.drools.compiler.kie.builder.MaterializedLambda; import org.drools.compiler.kie.util.KieJarChangeSet; import org.drools.compiler.kproject.models.KieBaseModelImpl; import org.drools.compiler.kproject.models.KieSessionModelImpl; import org.drools.compiler.management.KieContainerMonitor; import org.drools.core.SessionConfiguration; import org.drools.core.impl.InternalKieContainer; import org.drools.core.management.DroolsManagementAgent; import org.drools.core.management.DroolsManagementAgent.CBSKey; import org.drools.core.reteoo.RuntimeComponentFactory; import org.drools.kiesession.rulebase.InternalKnowledgeBase; import 
org.drools.kiesession.session.StatefulKnowledgeSessionImpl; import org.drools.kiesession.session.StatefulSessionPool; import org.drools.kiesession.session.StatelessKnowledgeSessionImpl; import org.drools.util.ClassUtils; import org.drools.wiring.api.classloader.ProjectClassLoader; import org.kie.api.KieBase; import org.kie.api.KieBaseConfiguration; import org.kie.api.KieServices; import org.kie.api.builder.KieModule; import org.kie.api.builder.KieRepository; import org.kie.api.builder.Message; import org.kie.api.builder.Message.Level; import org.kie.api.builder.ReleaseId; import org.kie.api.builder.Results; import org.kie.api.builder.model.FileLoggerModel; import org.kie.api.builder.model.KieBaseModel; import org.kie.api.builder.model.KieSessionModel; import org.kie.api.conf.MBeansOption; import org.kie.api.event.KieRuntimeEventManager; import org.kie.api.internal.utils.KieService; import org.kie.api.io.ResourceType; import org.kie.api.logger.KieLoggers; import org.kie.api.runtime.Environment; import org.kie.api.runtime.KieContainerSessionsPool; import org.kie.api.runtime.KieSession; import org.kie.api.runtime.KieSessionConfiguration; import org.kie.api.runtime.StatelessKieSession; import org.kie.api.time.Calendar; import org.kie.internal.builder.ChangeType; import org.kie.internal.builder.KnowledgeBuilderFactory; import org.kie.internal.builder.ResourceChange; import org.kie.internal.builder.ResourceChangeSet; import org.kie.internal.builder.conf.AlphaNetworkCompilerOption; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.util.stream.Collectors.toList; import static org.drools.base.util.Drools.isJndiAvailable; import static org.drools.compiler.kie.util.InjectionHelper.wireSessionComponents; import static org.drools.util.ClassUtils.convertResourceToClassName; public class KieContainerImpl implements InternalKieContainer { private static final Logger log = LoggerFactory.getLogger( KieContainerImpl.class ); private KieProject kProject; 
// Caches of runtime artifacts created by this container, keyed by model name.
// ConcurrentHashMap gives safe concurrent reads; KieBase creation is additionally
// double-checked under a per-model lock in getKieBase(String).
private final Map<String, KieBase> kBases = new ConcurrentHashMap<>();
private final Map<String, KieSession> kSessions = new ConcurrentHashMap<>();
private final Map<String, StatelessKieSession> statelessKSessions = new ConcurrentHashMap<>();

// Repository from which KieModules are resolved by ReleaseId.
private final KieRepository kr;

// ReleaseId explicitly supplied at construction time (null when none was given).
private ReleaseId configuredReleaseId;
// ReleaseId this container currently points at; advanced by updateToVersion/updateToKieModule.
private ReleaseId containerReleaseId;

private final String containerId;

// Cache of resolved session configurations (read/populated by code outside this view).
private final Map<String, KieSessionConfiguration> sessionConfsCache = new ConcurrentHashMap<>();

/** Returns the KieModule this container is built from, looked up by its resolved ReleaseId. */
public KieModule getMainKieModule() {
    return kr.getKieModule(getReleaseId());
}

/**
 * Please note: the recommended way of getting a KieContainer is relying on {@link org.kie.api.KieServices KieServices} API,
 * for example: {@link org.kie.api.KieServices#newKieContainer(ReleaseId) KieServices.newKieContainer(...)}.
 * The direct manual call to KieContainerImpl constructor instead would not guarantee the consistency of the supplied containerId.
 */
public KieContainerImpl(KieProject kProject, KieRepository kr) {
    // no containerId supplied: generate a unique one
    this("impl"+UUID.randomUUID(), kProject, kr);
}

/**
 * Please note: the recommended way of getting a KieContainer is relying on {@link org.kie.api.KieServices KieServices} API,
 * for example: {@link org.kie.api.KieServices#newKieContainer(ReleaseId) KieServices.newKieContainer(...)}.
 * The direct manual call to KieContainerImpl constructor instead would not guarantee the consistency of the supplied containerId.
 */
public KieContainerImpl(KieProject kProject, KieRepository kr, ReleaseId containerReleaseId) {
    this("impl"+UUID.randomUUID(), kProject, kr, containerReleaseId);
}

/**
 * Please note: the recommended way of getting a KieContainer is relying on {@link org.kie.api.KieServices KieServices} API,
 * for example: {@link org.kie.api.KieServices#newKieContainer(ReleaseId) KieServices.newKieContainer(...)}.
 * The direct manual call to KieContainerImpl constructor instead would not guarantee the consistency of the supplied containerId.
 */
public KieContainerImpl(String containerId, KieProject kProject, KieRepository kr) {
    this.kr = kr;
    this.kProject = kProject;
    this.containerId = containerId;
    kProject.init();
    initMBeans(containerId);
}

/**
 * Please note: the recommended way of getting a KieContainer is relying on {@link org.kie.api.KieServices KieServices} API,
 * for example: {@link org.kie.api.KieServices#newKieContainer(ReleaseId) KieServices.newKieContainer(...)}.
 * The direct manual call to KieContainerImpl constructor instead would not guarantee the consistency of the supplied containerId.
 */
public KieContainerImpl(String containerId, KieProject kProject, KieRepository kr, ReleaseId containerReleaseId) {
    this(containerId, kProject, kr);
    this.configuredReleaseId = containerReleaseId;
    this.containerReleaseId = containerReleaseId;
}

/** Registers a JMX monitor MBean for this container when the MBeans option is enabled. */
private void initMBeans(String containerId) {
    if ( isMBeanOptionEnabled() ) {
        KieContainerMonitor monitor = new KieContainerMonitor(this);
        ObjectName on = DroolsManagementAgent.createObjectNameBy(containerId);
        DroolsManagementAgent.getInstance().registerMBean( this, monitor, on );
    }
}

@Override
public String getContainerId() {
    return this.containerId;
}

@Override
public ReleaseId getConfiguredReleaseId() {
    return configuredReleaseId;
}

@Override
public ReleaseId getResolvedReleaseId() {
    return getReleaseId();
}

public ReleaseId getReleaseId() {
    return kProject.getGAV();
}

public InputStream getPomAsStream() {
    return kProject.getPomAsStream();
}

public long getCreationTimestamp() {
    return kProject.getCreationTimestamp();
}

// Falls back to the resolved ReleaseId when no container ReleaseId was ever set.
@Override
public ReleaseId getContainerReleaseId() {
    return containerReleaseId != null ?
containerReleaseId : getReleaseId(); }

/**
 * Upgrades this container's own KieModule to the given version.
 * On success the container ReleaseId is advanced; when the target module cannot be
 * found in the repository, an error Results is returned instead.
 */
public Results updateToVersion(ReleaseId newReleaseId) {
    checkNotClasspathKieProject();
    Results results = update(((KieModuleKieProject) kProject).getInternalKieModule(), newReleaseId);
    if (results != null) {
        containerReleaseId = newReleaseId;
    } else {
        // update() returned null: no KieModule with that ReleaseId in the repository
        results = new ResultsImpl();
        ( (ResultsImpl) results ).addMessage( Message.Level.ERROR, null, "Cannot find KieModule with ReleaseId: " + newReleaseId );
    }
    return results;
}

/** Swaps this container's KieModule for the supplied one and records its ReleaseId. */
public Results updateToKieModule(InternalKieModule newKM) {
    checkNotClasspathKieProject();
    Results results = update(((KieModuleKieProject) kProject).getInternalKieModule(), newKM);
    containerReleaseId = newKM.getReleaseId();
    return results;
}

/**
 * Upgrades either the container's own module (when currentReleaseId matches the installed
 * GAV) or one of its transitive dependencies, resolved from the repository.
 */
public Results updateDependencyToVersion(ReleaseId currentReleaseId, ReleaseId newReleaseId) {
    ReleaseId installedReleaseId = getReleaseId();
    if (currentReleaseId.getGroupId().equals(installedReleaseId.getGroupId()) &&
        currentReleaseId.getArtifactId().equals(installedReleaseId.getArtifactId())) {
        // upgrading the kProject itself: taking the kmodule from there
        return updateToVersion(newReleaseId);
    }
    checkNotClasspathKieProject();

    // upgrading a transitive dependency: taking the kmodule from the krepo
    // if the new and the current release are equal (a snapshot) check if there is an older version with the same releaseId
    InternalKieModule currentKM = currentReleaseId.equals(newReleaseId) ?
                                  (InternalKieModule) ((KieRepositoryImpl) kr).getOldKieModule(currentReleaseId) :
                                  (InternalKieModule) kr.getKieModule(currentReleaseId);
    return update(currentKM, newReleaseId);
}

// Classpath containers are immutable: version updates are not supported for them.
private void checkNotClasspathKieProject() {
    if( kProject instanceof ClasspathKieProject) {
        throw new UnsupportedOperationException( "It is not possible to update a classpath container to a new version." );
    }
}

// Resolves the target module from the repository; yields null when it cannot be found.
private Results update(final InternalKieModule currentKM, final ReleaseId newReleaseId) {
    final InternalKieModule newKM = (InternalKieModule) kr.getKieModule( newReleaseId );
    return newKM == null ?
null : update( currentKM, newKM ); }

/**
 * Incrementally updates every cached KieBase of this container from currentKM to newKM.
 * Computes the change set between the two modules, redefines modified classes, then
 * enqueues a composed updater on each surviving KieBase; KieBases and sessions whose
 * models no longer exist in the new module are evicted from the caches.
 */
private Results update( InternalKieModule currentKM, InternalKieModule newKM ) {
    final KieJarChangeSet cs = currentKM.getChanges( newKM );
    List<String> modifiedClassNames = getModifiedClasses(cs);
    // an updated FUNCTION resource is treated the same as modifying a class in use
    final boolean modifyingUsedClass = isModifyingUsedClass( modifiedClassNames, getClassLoader() ) || isModifyingUsedFunction(cs);
    final Collection<Class<?>> modifiedClasses = reinitModifiedClasses( newKM, modifiedClassNames, getClassLoader(), modifyingUsedClass );
    final Collection<String> unchangedResources = getUnchangedResources( newKM, cs );

    Map<String, KieBaseModel> currentKieBaseModels = ((KieModuleKieProject ) kProject).updateToModule( newKM );

    final ResultsImpl results = new ResultsImpl();

    currentKM.updateKieModule(newKM);

    List<String> kbasesToRemove = new ArrayList<>();
    for ( Entry<String, KieBase> kBaseEntry : kBases.entrySet() ) {
        String kbaseName = kBaseEntry.getKey();
        KieBaseModelImpl newKieBaseModel = (KieBaseModelImpl) kProject.getKieBaseModel( kbaseName );
        KieBaseModelImpl currentKieBaseModel = (KieBaseModelImpl) currentKieBaseModels.get( kbaseName );

        // if a kbase no longer exists, just remove it from the cache
        if ( newKieBaseModel == null ) {
            // have to save for later removal to avoid iteration errors
            kbasesToRemove.add( kbaseName );
        } else {
            final InternalKnowledgeBase kBase = (InternalKnowledgeBase) kBaseEntry.getValue();
            // share Knowledge Builder among updater as it's computationally expensive to create this
            KnowledgeBuilderConfigurationImpl builderConfiguration = (KnowledgeBuilderConfigurationImpl) newKM.createBuilderConfiguration(newKieBaseModel, kBase.getRootClassLoader());
            InternalKnowledgeBuilder kbuilder = (InternalKnowledgeBuilder) KnowledgeBuilderFactory.newKnowledgeBuilder(kBase, builderConfiguration);
            KieBaseUpdaterImplContext context = new KieBaseUpdaterImplContext(kProject, kBase, currentKM, newKM, cs, modifiedClasses, modifyingUsedClass,
                                                                             unchangedResources, results, newKieBaseModel, currentKieBaseModel, kbuilder);

            // Multiple updaters are required to be merged together in a single Runnable
            // to avoid a deadlock while using .fireUntilHalt()
            // see IncrementalCompilationTest.testMultipleIncrementalCompilationsWithFireUntilHalt
            // with multiple updaters (such as Alpha NetworkCompilerUpdater)
            CompositeRunnable compositeUpdater = new CompositeRunnable();
            KieBaseUpdater kieBaseUpdater = currentKM.createKieBaseUpdater(context);
            compositeUpdater.add(kieBaseUpdater);

            KieBaseUpdaterOptions kieBaseUpdaterOptions = new KieBaseUpdaterOptions(new KieBaseUpdaterOptions.OptionEntry(
                    AlphaNetworkCompilerOption.class, builderConfiguration.getOption(AlphaNetworkCompilerOption.KEY)));

            // plug in any additional updaters discovered via the KieService SPI
            KieBaseUpdaters updaters = KieService.load(KieBaseUpdaters.class);
            updaters.getChildren()
                    .stream()
                    .map(kbu -> kbu.create(new KieBaseUpdatersContext(kieBaseUpdaterOptions,
                                                                      context.kBase.getRete(),
                                                                      context.kBase.getRootClassLoader()
                    )))
                    .forEach(compositeUpdater::add);

            kBase.enqueueModification(compositeUpdater);
        }
    }

    for (String kbaseToRemove : kbasesToRemove) {
        kBases.remove(kbaseToRemove);
    }

    // remove sessions that no longer exist
    this.kSessions.entrySet().removeIf( ksession -> kProject.getKieSessionModel( ksession.getKey() ) == null );
    this.statelessKSessions.entrySet().removeIf( ksession -> kProject.getKieSessionModel( ksession.getKey() ) == null );

    return results;
}

/** Runs a list of updaters sequentially so they are applied as a single kbase modification. */
public static class CompositeRunnable implements Runnable {

    private final List<Runnable> runnables = new ArrayList<>();

    public void add(Runnable runnable) {
        runnables.add( runnable );
    }

    void addAll(List<Runnable> runnableList) {
        runnables.addAll( runnableList );
    }

    @Override
    public void run() {
        runnables.forEach( Runnable::run );
    }
}

// True when the change set contains an UPDATED FUNCTION resource.
private boolean isModifyingUsedFunction(KieJarChangeSet cs) {
    return cs.getChanges().values()
             .stream()
             .flatMap(resourceChangeSet -> resourceChangeSet.getChanges().stream())
             .anyMatch(change -> change.getType() == ResourceChange.Type.FUNCTION && change.getChangeType() == ChangeType.UPDATED);
}

private
Collection<String> getUnchangedResources( InternalKieModule newKM, KieJarChangeSet cs ) {
    // Files of the new module whose type is in TYPES_TO_BE_INCLUDED and that the
    // change set did not touch.
    List<String> dslFiles = new ArrayList<>();
    for (String file : newKM.getFileNames()) {
        if ( includeIfUnchanged( file ) && !cs.contains( file ) ) {
            dslFiles.add(file);
        }
    }
    return dslFiles;
}

// Resource types collected by getUnchangedResources even when untouched by the change set.
private static final ResourceType[] TYPES_TO_BE_INCLUDED = new ResourceType[] { ResourceType.DSL, ResourceType.GDRL };

// True when the file's extension matches one of TYPES_TO_BE_INCLUDED.
private boolean includeIfUnchanged( String file ) {
    for (ResourceType type : TYPES_TO_BE_INCLUDED ) {
        if (type.matchesExtension( file )) {
            return true;
        }
    }
    return false;
}

private boolean isModifyingUsedClass( List<String> modifiedClasses, ClassLoader classLoader ) {
    return modifiedClasses.stream().anyMatch( c -> isClassInUse( classLoader, convertResourceToClassName(c) ) );
}

// A non-ProjectClassLoader cannot track usage, so conservatively treat the class as in use.
private boolean isClassInUse(ClassLoader rootClassLoader, String className) {
    return !(rootClassLoader instanceof ProjectClassLoader) || ((ProjectClassLoader) rootClassLoader).isClassInUse(className, MaterializedLambda.class);
}

/**
 * Clears the ProjectClassLoader store and redefines the modified classes from the new
 * module's bytes; when a used class (or function) changed, previously initialized types
 * are reloaded as well. Returns the freshly defined classes. No-op (empty list) when
 * nothing changed or the loader is not a ProjectClassLoader.
 */
private Collection<Class<?>> reinitModifiedClasses( InternalKieModule newKM, List<String> modifiedClasses, ClassLoader classLoader, boolean modifyingUsedClass ) {
    if (modifiedClasses.isEmpty() || !(classLoader instanceof ProjectClassLoader)) {
        return Collections.emptyList();
    }

    Set<String> reloadedClasses = new HashSet<>(modifiedClasses);

    ProjectClassLoader projectClassLoader = (ProjectClassLoader) classLoader;
    projectClassLoader.clearStore();
    if (modifyingUsedClass) {
        reloadedClasses.addAll( projectClassLoader.reinitTypes().stream().map( ClassUtils::convertClassToResourcePath ).collect( toList() ) );
    }

    List<Class<?>> classes = new ArrayList<>();
    for (String resourceName : reloadedClasses) {
        String className = convertResourceToClassName( resourceName );
        byte[] bytes = newKM.getBytes(resourceName);
        // bytes may be null when the resource exists only as a name (e.g. removed content)
        if (bytes != null) {
            Class<?> clazz = projectClassLoader.defineClass( className, resourceName, bytes );
            classes.add( clazz );
        }
    }
    return classes;
}

private List<String>
getModifiedClasses(KieJarChangeSet cs) {
    // Resource names of all non-removed .class entries in the change set.
    List<String> modifiedClasses = new ArrayList<>();
    for ( ResourceChangeSet rcs : cs.getChanges().values() ) {
        if ( rcs.getChangeType() != ChangeType.REMOVED ) {
            String resourceName = rcs.getResourceName();
            if ( resourceName.endsWith( ".class" ) ) {
                modifiedClasses.add(resourceName);
            }
        }
    }
    return modifiedClasses;
}

public Collection<String> getKieBaseNames() {
    return kProject.getKieBaseNames();
}

// Returns an empty set (never null) when the kbase is unknown.
public Collection<String> getKieSessionNamesInKieBase(String kBaseName) {
    KieBaseModel kieBaseModel = kProject.getKieBaseModel(kBaseName);
    return kieBaseModel != null ? kieBaseModel.getKieSessionModels().keySet() : Collections.<String>emptySet();
}

/** Returns the default KieBase (built and cached on first access); fails when none is declared. */
public KieBase getKieBase() {
    KieBaseModel defaultKieBaseModel = kProject.getDefaultKieBaseModel();
    if (defaultKieBaseModel == null) {
        throw new RuntimeException("Cannot find a default KieBase");
    }
    return getKieBase( defaultKieBaseModel.getName() );
}

public Results verify() {
    return isVerifiable() ? this.kProject.verify() : new ResultsImpl();
}

public Results verify(String... kModelNames) {
    return isVerifiable() ? this.kProject.verify(kModelNames) : new ResultsImpl();
}

// A module that is itself the result of a successful build has nothing left to verify:
// skip verification and report an empty successful result.
private boolean isVerifiable() {
    if (kProject instanceof KieModuleKieProject) {
        InternalKieModule internalKieModule = ((KieModuleKieProject)kProject).getInternalKieModule();
        if (!internalKieModule.isVerifiable()) {
            log.info("{} is a result module of a successful build, so verify returns an empty successful result message", internalKieModule.getClass().getSimpleName());
            return false;
        }
    }
    return true;
}

/**
 * Returns the KieBase with the given name, creating and caching it on first access.
 * Creation is double-checked under a per-model lock so concurrent callers build it once.
 */
public KieBase getKieBase(String kBaseName) {
    KieBase kBase = kBases.get( kBaseName );
    if ( kBase == null ) {
        KieBaseModelImpl kBaseModel = getKieBaseModelImpl(kBaseName);
        synchronized (kBaseModel) {
            // re-check inside the lock: another thread may have built it meanwhile
            kBase = kBases.get( kBaseName );
            if ( kBase == null ) {
                BuildContext buildContext = new BuildContext();
                kBase = createKieBase(kBaseModel, kProject, buildContext, null);
                if (kBase == null) {
                    // build error, throw runtime exception
                    throw new RuntimeException("Error while creating KieBase" + buildContext.getMessages().filterMessages(Level.ERROR));
                }
                kBases.put(kBaseName, kBase);
            }
        }
    }
    return kBase;
}

/** Builds a brand new (uncached) KieBase from the default model with the given configuration. */
public KieBase newKieBase(KieBaseConfiguration conf) {
    KieBaseModel defaultKieBaseModel = kProject.getDefaultKieBaseModel();
    if (defaultKieBaseModel == null) {
        throw new RuntimeException("Cannot find a default KieBase");
    }
    return newKieBase(defaultKieBaseModel.getName(), conf);
}

public KieBase newKieBase(String kBaseName, KieBaseConfiguration conf) {
    BuildContext buildContext = new BuildContext();
    KieBase kBase = createKieBase(getKieBaseModelImpl(kBaseName), kProject, buildContext, conf);
    if ( kBase == null ) {
        // build error, throw runtime exception
        throw new RuntimeException( "Error while creating KieBase" + buildContext.getMessages().filterMessages( Level.ERROR ) );
    }
    return kBase;
}

// Builds a KieBase from its model; returns null on build failure (messages in buildContext).
private KieBase createKieBase(KieBaseModelImpl kBaseModel, KieProject kieProject, BuildContext buildContext, KieBaseConfiguration conf) {
    if (log.isInfoEnabled()) {
        log.info( "Start creation of KieBase: " + kBaseModel.getName() );
    }
InternalKieModule kModule = kieProject.getKieModuleForKBase( kBaseModel.getName() ); InternalKnowledgeBase kBase = kModule.createKieBase(kBaseModel, kieProject, buildContext, conf); kModule.afterKieBaseCreationUpdate(kBaseModel.getName(), kBase); if ( kBase == null ) { return null; } kBase.setResolvedReleaseId(containerReleaseId); kBase.setContainerId(containerId); kBase.setKieContainer(this); kBase.initMBeans(); if (log.isInfoEnabled()) { log.info( "End creation of KieBase: " + kBaseModel.getName() ); } return kBase; } private KieBaseModelImpl getKieBaseModelImpl(String kBaseName) { KieBaseModelImpl kBaseModel = (KieBaseModelImpl) kProject.getKieBaseModel(kBaseName); if (kBaseModel == null) { throw new RuntimeException( "The requested KieBase \"" + kBaseName + "\" does not exist" ); } return kBaseModel; } public KieSession newKieSession() { return newKieSession((Environment)null, null); } public KieSession getKieSession() { KieSessionModel defaultKieSessionModel = findKieSessionModel(false); return getKieSession(defaultKieSessionModel.getName()); } public KieSession newKieSession(KieSessionConfiguration conf) { return newKieSession((Environment)null, conf); } public KieSession newKieSession(Environment environment) { return newKieSession(environment, null); } public KieSession newKieSession(Environment environment, KieSessionConfiguration conf) { return newKieSession(null, environment, conf); } public KieContainerSessionsPool newKieSessionsPool( int initialSize) { return new KieContainerSessionsPoolImpl(this, initialSize); } StatefulSessionPool createKieSessionsPool(String kSessionName, KieSessionConfiguration conf, Environment env, int initialSize, boolean stateless) { KieSessionModel kSessionModel = kSessionName != null ? 
getKieSessionModel(kSessionName) : findKieSessionModel(false); if ( kSessionModel == null ) { log.error("Unknown KieSession name: " + kSessionName); return null; } InternalKnowledgeBase kBase = (InternalKnowledgeBase) getKieBaseFromKieSessionModel(kSessionModel); return kBase == null ? null : new StatefulSessionPool(kBase, initialSize, () -> { SessionConfiguration sessConf = conf != null ? conf.as(SessionConfiguration.KEY) : kBase.getSessionConfiguration().as(SessionConfiguration.KEY); StatefulKnowledgeSessionImpl kSession = stateless ? ((StatefulKnowledgeSessionImpl) RuntimeComponentFactory.get().createStatefulSession(kBase, env, sessConf, false)).setStateless( true ) : (StatefulKnowledgeSessionImpl) kBase.newKieSession( sessConf, env ); registerNewKieSession( kSessionModel, kBase, kSession ); return kSession; }); } private KieSessionModel findKieSessionModel(boolean stateless) { KieSessionModel defaultKieSessionModel = stateless ? kProject.getDefaultStatelessKieSession() : kProject.getDefaultKieSession(); if (defaultKieSessionModel == null) { throw new RuntimeException(stateless ? 
"Cannot find a default StatelessKieSession" : "Cannot find a default KieSession"); } return defaultKieSessionModel; } public StatelessKieSession newStatelessKieSession() { return newStatelessKieSession((KieSessionConfiguration)null); } public StatelessKieSession newStatelessKieSession(KieSessionConfiguration conf) { KieSessionModel defaultKieSessionModel = findKieSessionModel(true); return newStatelessKieSession(defaultKieSessionModel.getName(), conf); } public StatelessKieSession getStatelessKieSession() { KieSessionModel defaultKieSessionModel = findKieSessionModel(true); return getStatelessKieSession(defaultKieSessionModel.getName()); } public KieSession newKieSession(String kSessionName) { return newKieSession(kSessionName, null, null); } public KieSession getKieSession(String kSessionName) { KieSession kieSession = kSessions.get(kSessionName); if (kieSession instanceof StatefulKnowledgeSessionImpl && !((StatefulKnowledgeSessionImpl)kieSession).isAlive()) { kSessions.remove(kSessionName); kieSession = null; } return kieSession != null ? kieSession : newKieSession(kSessionName); } public KieSession newKieSession(String kSessionName, Environment environment) { return newKieSession(kSessionName, environment, null); } public KieSession newKieSession(String kSessionName, KieSessionConfiguration conf) { return newKieSession(kSessionName, null, conf); } public KieSession newKieSession(String kSessionName, Environment environment, KieSessionConfiguration conf) { KieSessionModelImpl kSessionModel = kSessionName != null ? (KieSessionModelImpl) getKieSessionModel(kSessionName) : (KieSessionModelImpl) findKieSessionModel(false); if ( kSessionModel == null ) { log.error("Unknown KieSession name: " + kSessionName); return null; } KieBase kBase = getKieBaseFromKieSessionModel( kSessionModel ); if ( kBase == null ) { return null; } KieSession kSession = kBase.newKieSession( conf != null ? 
conf : getKieSessionConfiguration( kSessionModel ), environment ); registerNewKieSession(kSessionModel, (InternalKnowledgeBase) kBase, kSession); return kSession; } private void registerNewKieSession(KieSessionModel kSessionModel, InternalKnowledgeBase kBase, KieSession kSession) { if (isJndiAvailable()) { wireSessionComponents( kSessionModel, kSession ); } registerLoggers(kSessionModel, kSession); registerCalendars(kSessionModel, kSession); ((StatefulKnowledgeSessionImpl ) kSession).initMBeans(containerId, kBase.getId(), kSessionModel.getName()); kSessions.put(kSessionModel.getName(), kSession); } private KieBase getKieBaseFromKieSessionModel( KieSessionModel kSessionModel ) { if (kSessionModel.getType() == KieSessionModel.KieSessionType.STATELESS) { throw new RuntimeException("Trying to create a stateful KieSession from a stateless KieSessionModel: " + kSessionModel.getName()); } KieBase kBase = getKieBase( kSessionModel.getKieBaseModel().getName() ); if ( kBase == null ) { log.error("Unknown KieBase name: " + kSessionModel.getKieBaseModel().getName()); return null; } return kBase; } private void registerLoggers(KieSessionModel kSessionModel, KieRuntimeEventManager kSession) { KieLoggers kieLoggers = KieServices.Factory.get().getLoggers(); if (kSessionModel.getConsoleLogger() != null) { kieLoggers.newConsoleLogger(kSession); } FileLoggerModel fileLogger = kSessionModel.getFileLogger(); if (fileLogger != null) { if (fileLogger.isThreaded()) { kieLoggers.newThreadedFileLogger(kSession, fileLogger.getFile(), fileLogger.getInterval()); } else { kieLoggers.newFileLogger(kSession, fileLogger.getFile()); } } } private void registerCalendars(KieSessionModel kSessionModel, KieSession kSession) { for (Map.Entry<String, String> entry : kSessionModel.getCalendars().entrySet()) { try { Calendar calendar = (Calendar) getClassLoader().loadClass( entry.getValue() ).newInstance(); kSession.getCalendars().set( entry.getKey(), calendar ); } catch (InstantiationException | 
IllegalAccessException | ClassNotFoundException e) { log.error( "Cannot instance calendar " + entry.getKey(), e ); } } } public StatelessKieSession newStatelessKieSession(String kSessionName) { return newStatelessKieSession(kSessionName, null); } public StatelessKieSession newStatelessKieSession(String kSessionName, KieSessionConfiguration conf) { KieSessionModelImpl kSessionModel = kSessionName != null ? (KieSessionModelImpl) getKieSessionModel(kSessionName) : (KieSessionModelImpl) findKieSessionModel(true); if ( kSessionModel == null ) { log.error("Unknown KieSession name: " + kSessionName); return null; } if (kSessionModel.getType() == KieSessionModel.KieSessionType.STATEFUL) { throw new RuntimeException("Trying to create a stateless KieSession from a stateful KieSessionModel: " + kSessionModel.getName()); } KieBase kBase = getKieBase( kSessionModel.getKieBaseModel().getName() ); if ( kBase == null ) { log.error("Unknown KieBase name: " + kSessionModel.getKieBaseModel().getName()); return null; } StatelessKieSession statelessKieSession = kBase.newStatelessKieSession( conf != null ? conf : getKieSessionConfiguration( kSessionModel ) ); if (isJndiAvailable()) { wireSessionComponents( kSessionModel, statelessKieSession ); } registerLoggers(kSessionModel, statelessKieSession); ((StatelessKnowledgeSessionImpl) statelessKieSession).initMBeans(containerId, ((InternalKnowledgeBase) kBase).getId(), kSessionModel.getName()); statelessKSessions.put(kSessionModel.getName(), statelessKieSession); return statelessKieSession; } public StatelessKieSession getStatelessKieSession(String kSessionName) { StatelessKieSession kieSession = statelessKSessions.get(kSessionName); return kieSession != null ? 
kieSession : newStatelessKieSession(kSessionName); } public KieSessionConfiguration getKieSessionConfiguration() { return getKieSessionConfiguration( kProject.getDefaultKieSession() ); } public KieSessionConfiguration getKieSessionConfiguration( String kSessionName ) { KieSessionModelImpl kSessionModel = (KieSessionModelImpl) kProject.getKieSessionModel( kSessionName ); if ( kSessionModel == null ) { log.error("Unknown KieSession name: " + kSessionName); return null; } return getKieSessionConfiguration( kSessionModel ); } private KieSessionConfiguration getKieSessionConfiguration( KieSessionModel kSessionModel ) { KieSessionConfiguration ksConf = sessionConfsCache.computeIfAbsent(kSessionModel.getName(), k -> new KieServicesImpl().newKieSessionConfiguration(null, kProject.getClassLoader()) ); ksConf.setOption( kSessionModel.getClockType() ); ksConf.setOption( kSessionModel.getBeliefSystem() ); return ksConf; } public void dispose() { sessionConfsCache.clear(); kBases.values().forEach( kb -> ( (InternalKnowledgeBase) kb ).setKieContainer(null)); Set<DroolsManagementAgent.CBSKey> cbskeys = new HashSet<>(); if ( isMBeanOptionEnabled() ) { for (Entry<String, KieSession> kv : kSessions.entrySet()) { cbskeys.add(new DroolsManagementAgent.CBSKey(containerId, ((RuleBase) kv.getValue().getKieBase()).getId(), kv.getKey())); } for (Entry<String, StatelessKieSession> kv : statelessKSessions.entrySet()) { cbskeys.add(new DroolsManagementAgent.CBSKey(containerId, ((RuleBase) kv.getValue().getKieBase()).getId(), kv.getKey())); } } for (KieSession kieSession : kSessions.values()) { kieSession.dispose(); } kSessions.clear(); statelessKSessions.clear(); if ( isMBeanOptionEnabled() ) { for (CBSKey c : cbskeys) { DroolsManagementAgent.getInstance().unregisterKnowledgeSessionBean(c); } for (KieBase kb : kBases.values()) { DroolsManagementAgent.getInstance().unregisterKnowledgeBase((RuleBase) kb); } DroolsManagementAgent.getInstance().unregisterMBeansFromOwner(this); } 
((InternalKieServices) KieServices.Factory.get()).clearRefToContainerId(this.containerId, this); } @Override public void disposeSession(KieSession kieSession) { if (!isMBeanOptionEnabled()) { kSessions.values().remove( kieSession ); } } private boolean isMBeanOptionEnabled() { return MBeansOption.isEnabled( System.getProperty( MBeansOption.PROPERTY_NAME, MBeansOption.DISABLED.toString() ) ); } public KieProject getKieProject() { return kProject; } public KieModule getKieModuleForKBase(String kBaseName) { return kProject.getKieModuleForKBase( kBaseName ); } public KieBaseModel getKieBaseModel(String kBaseName) { return kProject.getKieBaseModel(kBaseName); } public KieSessionModel getKieSessionModel(String kSessionName) { return kProject.getKieSessionModel(kSessionName); } @Override public ClassLoader getClassLoader() { return this.kProject.getClassLoader(); } }
apache/pinot
35,247
pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pinot.integration.tests; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.sql.Connection; import java.sql.DriverManager; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.function.Function; import javax.annotation.Nullable; import org.apache.commons.io.FileUtils; import org.apache.helix.task.TaskPartitionState; import org.apache.helix.task.TaskState; import org.apache.pinot.client.ConnectionFactory; import org.apache.pinot.client.JsonAsyncHttpPinotClientTransportFactory; import org.apache.pinot.client.PinotClientTransportFactory; import org.apache.pinot.client.ResultSetGroup; import org.apache.pinot.common.restlet.resources.ValidDocIdsMetadataInfo; import org.apache.pinot.common.restlet.resources.ValidDocIdsType; import org.apache.pinot.common.utils.TarCompressionUtils; import 
org.apache.pinot.common.utils.config.TagNameUtils; import org.apache.pinot.plugin.inputformat.csv.CSVMessageDecoder; import org.apache.pinot.plugin.stream.kafka.KafkaStreamConfigProperties; import org.apache.pinot.server.starter.helix.BaseServerStarter; import org.apache.pinot.spi.config.table.ColumnPartitionConfig; import org.apache.pinot.spi.config.table.DedupConfig; import org.apache.pinot.spi.config.table.FieldConfig; import org.apache.pinot.spi.config.table.QueryConfig; import org.apache.pinot.spi.config.table.ReplicaGroupStrategyConfig; import org.apache.pinot.spi.config.table.RoutingConfig; import org.apache.pinot.spi.config.table.SegmentPartitionConfig; import org.apache.pinot.spi.config.table.TableConfig; import org.apache.pinot.spi.config.table.TableTaskConfig; import org.apache.pinot.spi.config.table.TableType; import org.apache.pinot.spi.config.table.UpsertConfig; import org.apache.pinot.spi.config.table.ingestion.IngestionConfig; import org.apache.pinot.spi.data.Schema; import org.apache.pinot.spi.data.readers.FileFormat; import org.apache.pinot.spi.stream.StreamConfigProperties; import org.apache.pinot.spi.stream.StreamDataServerStartable; import org.apache.pinot.spi.utils.Enablement; import org.apache.pinot.spi.utils.JsonUtils; import org.apache.pinot.spi.utils.builder.TableConfigBuilder; import org.apache.pinot.spi.utils.builder.TableNameBuilder; import org.apache.pinot.tools.utils.KafkaStarterUtils; import org.apache.pinot.util.TestUtils; import org.intellij.lang.annotations.Language; import org.testng.Assert; /** * Shared implementation details of the cluster integration tests. 
*/ public abstract class BaseClusterIntegrationTest extends ClusterTest { // Default settings protected static final String DEFAULT_TABLE_NAME = "mytable"; protected static final String DEFAULT_SCHEMA_NAME = "mytable"; protected static final String DEFAULT_SCHEMA_FILE_NAME = "On_Time_On_Time_Performance_2014_100k_subset_nonulls.schema"; protected static final String DEFAULT_TIME_COLUMN_NAME = "DaysSinceEpoch"; protected static final String DEFAULT_AVRO_TAR_FILE_NAME = "On_Time_On_Time_Performance_2014_100k_subset_nonulls.tar.gz"; protected static final long DEFAULT_COUNT_STAR_RESULT = 115545L; protected static final int DEFAULT_LLC_SEGMENT_FLUSH_SIZE = 5000; protected static final int DEFAULT_TRANSACTION_NUM_KAFKA_BROKERS = 3; protected static final int DEFAULT_LLC_NUM_KAFKA_BROKERS = 2; protected static final int DEFAULT_LLC_NUM_KAFKA_PARTITIONS = 2; protected static final int DEFAULT_MAX_NUM_KAFKA_MESSAGES_PER_BATCH = 10000; protected static final List<String> DEFAULT_NO_DICTIONARY_COLUMNS = Arrays.asList("ActualElapsedTime", "ArrDelay", "DepDelay", "CRSDepTime"); protected static final String DEFAULT_SORTED_COLUMN = "Carrier"; protected static final List<String> DEFAULT_INVERTED_INDEX_COLUMNS = Arrays.asList("FlightNum", "Origin", "Quarter"); private static final List<String> DEFAULT_BLOOM_FILTER_COLUMNS = Arrays.asList("FlightNum", "Origin"); private static final List<String> DEFAULT_RANGE_INDEX_COLUMNS = Collections.singletonList("Origin"); protected static final int DEFAULT_NUM_REPLICAS = 1; protected static final boolean DEFAULT_NULL_HANDLING_ENABLED = false; protected final File _tempDir = new File(FileUtils.getTempDirectory(), getClass().getSimpleName()); protected final File _segmentDir = new File(_tempDir, "segmentDir"); protected final File _tarDir = new File(_tempDir, "tarDir"); protected List<StreamDataServerStartable> _kafkaStarters; protected org.apache.pinot.client.Connection _pinotConnection; protected org.apache.pinot.client.Connection 
_pinotConnectionV2; protected Connection _h2Connection; protected QueryGenerator _queryGenerator; /** * The following getters can be overridden to change default settings. */ protected String getTableName() { return DEFAULT_TABLE_NAME; } protected String getSchemaFileName() { return DEFAULT_SCHEMA_FILE_NAME; } @Nullable protected String getTimeColumnName() { return DEFAULT_TIME_COLUMN_NAME; } protected String getAvroTarFileName() { return DEFAULT_AVRO_TAR_FILE_NAME; } protected long getCountStarResult() { return DEFAULT_COUNT_STAR_RESULT; } protected boolean useKafkaTransaction() { return false; } protected String getStreamConsumerFactoryClassName() { return KafkaStarterUtils.KAFKA_STREAM_CONSUMER_FACTORY_CLASS_NAME; } protected int getRealtimeSegmentFlushSize() { return DEFAULT_LLC_SEGMENT_FLUSH_SIZE; } protected int getNumKafkaBrokers() { return useKafkaTransaction() ? DEFAULT_TRANSACTION_NUM_KAFKA_BROKERS : DEFAULT_LLC_NUM_KAFKA_BROKERS; } protected int getKafkaPort() { int idx = RANDOM.nextInt(_kafkaStarters.size()); return _kafkaStarters.get(idx).getPort(); } protected String getKafkaZKAddress() { return getZkUrl() + "/kafka"; } protected int getNumKafkaPartitions() { return DEFAULT_LLC_NUM_KAFKA_PARTITIONS; } protected String getKafkaTopic() { return getClass().getSimpleName(); } protected int getMaxNumKafkaMessagesPerBatch() { return DEFAULT_MAX_NUM_KAFKA_MESSAGES_PER_BATCH; } @Nullable protected byte[] getKafkaMessageHeader() { return null; } @Nullable protected String getPartitionColumn() { return null; } @Nullable protected String getSortedColumn() { return DEFAULT_SORTED_COLUMN; } @Nullable protected List<String> getInvertedIndexColumns() { return new ArrayList<>(DEFAULT_INVERTED_INDEX_COLUMNS); } protected boolean isCreateInvertedIndexDuringSegmentGeneration() { return false; } @Nullable protected List<String> getNoDictionaryColumns() { return new ArrayList<>(DEFAULT_NO_DICTIONARY_COLUMNS); } @Nullable protected List<String> getRangeIndexColumns() { 
return new ArrayList<>(DEFAULT_RANGE_INDEX_COLUMNS); } @Nullable protected RoutingConfig getRoutingConfig() { // Default routing config is handled by broker return null; } @Nullable protected UpsertConfig getUpsertConfig() { return null; } @Nullable protected List<String> getBloomFilterColumns() { return new ArrayList<>(DEFAULT_BLOOM_FILTER_COLUMNS); } @Nullable protected List<FieldConfig> getFieldConfigs() { return null; } protected int getNumReplicas() { return DEFAULT_NUM_REPLICAS; } @Nullable protected String getSegmentVersion() { return null; } @Nullable protected String getLoadMode() { return null; } @Nullable protected TableTaskConfig getTaskConfig() { return null; } @Nullable protected String getBrokerTenant() { return TagNameUtils.DEFAULT_TENANT_NAME; } @Nullable protected String getServerTenant() { return TagNameUtils.DEFAULT_TENANT_NAME; } @Nullable protected IngestionConfig getIngestionConfig() { return null; } protected QueryConfig getQueryConfig() { // Enable groovy for tables used in the tests return new QueryConfig(null, false, null, null, null, null); } protected boolean getNullHandlingEnabled() { return DEFAULT_NULL_HANDLING_ENABLED; } @Nullable protected SegmentPartitionConfig getSegmentPartitionConfig() { return null; } @Nullable protected ReplicaGroupStrategyConfig getReplicaGroupStrategyConfig() { return null; } /** * Creates a new schema. 
*/ protected Schema createSchema() throws IOException { Schema schema = createSchema(getSchemaFileName()); schema.setSchemaName(getTableName()); return schema; } protected Schema createSchema(String schemaFileName) throws IOException { InputStream inputStream = getClass().getClassLoader().getResourceAsStream(schemaFileName); Assert.assertNotNull(inputStream); return Schema.fromInputStream(inputStream); } protected Schema createSchema(File schemaFile) throws IOException { return Schema.fromInputStream(new FileInputStream(schemaFile)); } protected TableConfig createTableConfig(String tableConfigFileName) throws IOException { URL configPathUrl = getClass().getClassLoader().getResource(tableConfigFileName); Assert.assertNotNull(configPathUrl); return createTableConfig(new File(configPathUrl.getFile())); } protected TableConfig createTableConfig(File tableConfigFile) throws IOException { InputStream inputStream = new FileInputStream(tableConfigFile); Assert.assertNotNull(inputStream); return JsonUtils.inputStreamToObject(inputStream, TableConfig.class); } /** * Creates a new OFFLINE table config. 
*/ protected TableConfig createOfflineTableConfig() { // @formatter:off return new TableConfigBuilder(TableType.OFFLINE) .setTableName(getTableName()) .setTimeColumnName(getTimeColumnName()) .setSortedColumn(getSortedColumn()) .setInvertedIndexColumns(getInvertedIndexColumns()) .setCreateInvertedIndexDuringSegmentGeneration(isCreateInvertedIndexDuringSegmentGeneration()) .setNoDictionaryColumns(getNoDictionaryColumns()) .setRangeIndexColumns(getRangeIndexColumns()) .setBloomFilterColumns(getBloomFilterColumns()) .setFieldConfigList(getFieldConfigs()) .setNumReplicas(getNumReplicas()) .setSegmentVersion(getSegmentVersion()) .setLoadMode(getLoadMode()) .setTaskConfig(getTaskConfig()) .setBrokerTenant(getBrokerTenant()) .setServerTenant(getServerTenant()) .setIngestionConfig(getIngestionConfig()) .setQueryConfig(getQueryConfig()) .setNullHandlingEnabled(getNullHandlingEnabled()) .setSegmentPartitionConfig(getSegmentPartitionConfig()) .setOptimizeNoDictStatsCollection(true) .build(); // @formatter:on } /** * Returns the OFFLINE table config in the cluster. 
*/ protected TableConfig getOfflineTableConfig() { return getOfflineTableConfig(getTableName()); } protected Map<String, String> getStreamConfigs() { return getStreamConfigMap(); } protected Map<String, String> getStreamConfigMap() { Map<String, String> streamConfigMap = new HashMap<>(); String streamType = "kafka"; streamConfigMap.put(StreamConfigProperties.STREAM_TYPE, streamType); streamConfigMap.put(KafkaStreamConfigProperties.constructStreamProperty( KafkaStreamConfigProperties.LowLevelConsumer.KAFKA_BROKER_LIST), "localhost:" + _kafkaStarters.get(0).getPort()); if (useKafkaTransaction()) { streamConfigMap.put(KafkaStreamConfigProperties.constructStreamProperty( KafkaStreamConfigProperties.LowLevelConsumer.KAFKA_ISOLATION_LEVEL), KafkaStreamConfigProperties.LowLevelConsumer.KAFKA_ISOLATION_LEVEL_READ_COMMITTED); } streamConfigMap.put(StreamConfigProperties.constructStreamProperty(streamType, StreamConfigProperties.STREAM_CONSUMER_FACTORY_CLASS), getStreamConsumerFactoryClassName()); streamConfigMap.put( StreamConfigProperties.constructStreamProperty(streamType, StreamConfigProperties.STREAM_TOPIC_NAME), getKafkaTopic()); streamConfigMap.put( StreamConfigProperties.constructStreamProperty(streamType, StreamConfigProperties.STREAM_DECODER_CLASS), AvroFileSchemaKafkaAvroMessageDecoder.class.getName()); streamConfigMap.put(StreamConfigProperties.SEGMENT_FLUSH_THRESHOLD_ROWS, Integer.toString(getRealtimeSegmentFlushSize())); streamConfigMap.put(StreamConfigProperties.constructStreamProperty(streamType, StreamConfigProperties.STREAM_CONSUMER_OFFSET_CRITERIA), "smallest"); return streamConfigMap; } /** * Creates a new REALTIME table config. 
*/ protected TableConfig createRealtimeTableConfig(File sampleAvroFile) { AvroFileSchemaKafkaAvroMessageDecoder._avroFile = sampleAvroFile; return getTableConfigBuilder(TableType.REALTIME).build(); } // TODO - Use this method to create table config for all table types to avoid redundant code protected TableConfigBuilder getTableConfigBuilder(TableType tableType) { return new TableConfigBuilder(tableType) .setTableName(getTableName()) .setTimeColumnName(getTimeColumnName()) .setSortedColumn(getSortedColumn()) .setInvertedIndexColumns(getInvertedIndexColumns()) .setNoDictionaryColumns(getNoDictionaryColumns()) .setRangeIndexColumns(getRangeIndexColumns()) .setRoutingConfig(getRoutingConfig()) .setUpsertConfig(getUpsertConfig()) .setBloomFilterColumns(getBloomFilterColumns()) .setFieldConfigList(getFieldConfigs()) .setNumReplicas(getNumReplicas()) .setSegmentVersion(getSegmentVersion()) .setLoadMode(getLoadMode()) .setTaskConfig(getTaskConfig()) .setBrokerTenant(getBrokerTenant()) .setServerTenant(getServerTenant()) .setIngestionConfig(getIngestionConfig()) .setQueryConfig(getQueryConfig()) .setStreamConfigs(getStreamConfigs()) .setNullHandlingEnabled(getNullHandlingEnabled()) .setSegmentPartitionConfig(getSegmentPartitionConfig()) .setOptimizeNoDictStatsCollection(true) .setReplicaGroupStrategyConfig(getReplicaGroupStrategyConfig()); } /** * Creates a new Upsert enabled table config. 
*/ protected TableConfig createUpsertTableConfig(File sampleAvroFile, String primaryKeyColumn, String deleteColumn, int numPartitions) { AvroFileSchemaKafkaAvroMessageDecoder._avroFile = sampleAvroFile; Map<String, ColumnPartitionConfig> columnPartitionConfigMap = new HashMap<>(); columnPartitionConfigMap.put(primaryKeyColumn, new ColumnPartitionConfig("Murmur", numPartitions)); UpsertConfig upsertConfig = new UpsertConfig(UpsertConfig.Mode.FULL); upsertConfig.setDeleteRecordColumn(deleteColumn); return new TableConfigBuilder(TableType.REALTIME).setTableName(getTableName()) .setTimeColumnName(getTimeColumnName()).setFieldConfigList(getFieldConfigs()).setNumReplicas(getNumReplicas()) .setSegmentVersion(getSegmentVersion()).setLoadMode(getLoadMode()).setTaskConfig(getTaskConfig()) .setBrokerTenant(getBrokerTenant()).setServerTenant(getServerTenant()).setIngestionConfig(getIngestionConfig()) .setStreamConfigs(getStreamConfigs()).setNullHandlingEnabled(getNullHandlingEnabled()).setRoutingConfig( new RoutingConfig(null, null, RoutingConfig.STRICT_REPLICA_GROUP_INSTANCE_SELECTOR_TYPE, false)) .setSegmentPartitionConfig(new SegmentPartitionConfig(columnPartitionConfigMap)) .setReplicaGroupStrategyConfig(new ReplicaGroupStrategyConfig(primaryKeyColumn, 1)) .setOptimizeNoDictStatsCollection(true) .setUpsertConfig(upsertConfig).build(); } protected Map<String, String> getCSVDecoderProperties(@Nullable String delimiter, @Nullable String csvHeaderProperty) { String streamType = "kafka"; Map<String, String> csvDecoderProperties = new HashMap<>(); csvDecoderProperties.put( StreamConfigProperties.constructStreamProperty(streamType, StreamConfigProperties.STREAM_DECODER_CLASS), CSVMessageDecoder.class.getName()); if (delimiter != null) { csvDecoderProperties.put(StreamConfigProperties.constructStreamProperty(streamType, "decoder.prop.delimiter"), delimiter); } if (csvHeaderProperty != null) { csvDecoderProperties.put(StreamConfigProperties.constructStreamProperty(streamType, 
"decoder.prop.header"), csvHeaderProperty); } return csvDecoderProperties; } /** * Creates a new Upsert enabled table config. */ protected TableConfig createCSVUpsertTableConfig(String tableName, @Nullable String kafkaTopicName, int numPartitions, Map<String, String> streamDecoderProperties, UpsertConfig upsertConfig, String primaryKeyColumn) { Map<String, ColumnPartitionConfig> columnPartitionConfigMap = new HashMap<>(); columnPartitionConfigMap.put(primaryKeyColumn, new ColumnPartitionConfig("Murmur", numPartitions)); if (upsertConfig == null) { upsertConfig = new UpsertConfig(UpsertConfig.Mode.FULL); upsertConfig.setSnapshot(Enablement.ENABLE); } if (kafkaTopicName == null) { kafkaTopicName = getKafkaTopic(); } Map<String, String> streamConfigsMap = getStreamConfigMap(); streamConfigsMap.put( StreamConfigProperties.constructStreamProperty("kafka", StreamConfigProperties.STREAM_TOPIC_NAME), kafkaTopicName); streamConfigsMap.putAll(streamDecoderProperties); return new TableConfigBuilder(TableType.REALTIME).setTableName(tableName).setTimeColumnName(getTimeColumnName()) .setFieldConfigList(getFieldConfigs()).setNumReplicas(getNumReplicas()).setSegmentVersion(getSegmentVersion()) .setLoadMode(getLoadMode()).setTaskConfig(getTaskConfig()).setBrokerTenant(getBrokerTenant()) .setServerTenant(getServerTenant()).setIngestionConfig(getIngestionConfig()).setStreamConfigs(streamConfigsMap) .setNullHandlingEnabled(UpsertConfig.Mode.PARTIAL.equals(upsertConfig.getMode()) || getNullHandlingEnabled()) .setRoutingConfig( new RoutingConfig(null, null, RoutingConfig.STRICT_REPLICA_GROUP_INSTANCE_SELECTOR_TYPE, false)) .setSegmentPartitionConfig(new SegmentPartitionConfig(columnPartitionConfigMap)) .setReplicaGroupStrategyConfig(new ReplicaGroupStrategyConfig(primaryKeyColumn, 1)) .setOptimizeNoDictStatsCollection(true) .setUpsertConfig(upsertConfig).build(); } /** * Creates a new Dedup enabled table config */ protected TableConfig createDedupTableConfig(File sampleAvroFile, String 
primaryKeyColumn, int numPartitions) { AvroFileSchemaKafkaAvroMessageDecoder._avroFile = sampleAvroFile; Map<String, ColumnPartitionConfig> columnPartitionConfigMap = new HashMap<>(); columnPartitionConfigMap.put(primaryKeyColumn, new ColumnPartitionConfig("Murmur", numPartitions)); return new TableConfigBuilder(TableType.REALTIME).setTableName(getTableName()) .setTimeColumnName(getTimeColumnName()) .setFieldConfigList(getFieldConfigs()) .setNumReplicas(getNumReplicas()) .setSegmentVersion(getSegmentVersion()) .setLoadMode(getLoadMode()) .setTaskConfig(getTaskConfig()) .setBrokerTenant(getBrokerTenant()) .setServerTenant(getServerTenant()) .setIngestionConfig(getIngestionConfig()) .setNullHandlingEnabled(getNullHandlingEnabled()) .setRoutingConfig( new RoutingConfig(null, null, RoutingConfig.STRICT_REPLICA_GROUP_INSTANCE_SELECTOR_TYPE, false)) .setSegmentPartitionConfig(new SegmentPartitionConfig(columnPartitionConfigMap)) .setReplicaGroupStrategyConfig(new ReplicaGroupStrategyConfig(primaryKeyColumn, 1)) .setDedupConfig(new DedupConfig()) .setOptimizeNoDictStatsCollection(true) .build(); } /** * Returns the REALTIME table config in the cluster. */ protected TableConfig getRealtimeTableConfig() { return getRealtimeTableConfig(getTableName()); } /** * Returns the headers to be used for the connection to Pinot cluster. * {@link PinotClientTransportFactory} */ protected Map<String, String> getPinotClientTransportHeaders() { return Map.of(); } /** * Get the Pinot connection. 
* * @return Pinot connection */ protected org.apache.pinot.client.Connection getPinotConnection() { // TODO: This code is assuming getPinotConnectionProperties() will always return the same values if (useMultiStageQueryEngine()) { if (_pinotConnectionV2 == null) { Properties properties = getPinotConnectionProperties(); properties.put("useMultistageEngine", "true"); _pinotConnectionV2 = ConnectionFactory.fromZookeeper(getZkUrl() + "/" + getHelixClusterName(), new JsonAsyncHttpPinotClientTransportFactory().withConnectionProperties(properties).buildTransport()); } return _pinotConnectionV2; } if (_pinotConnection == null) { JsonAsyncHttpPinotClientTransportFactory factory = new JsonAsyncHttpPinotClientTransportFactory() .withConnectionProperties(getPinotConnectionProperties()); factory.setHeaders(getPinotClientTransportHeaders()); _pinotConnection = ConnectionFactory.fromZookeeper(getZkUrl() + "/" + getHelixClusterName(), factory.buildTransport()); } return _pinotConnection; } protected Properties getPinotConnectionProperties() { Properties properties = new Properties(); properties.putAll(getExtraQueryProperties()); return properties; } /** * Get the H2 connection. H2 connection must be set up before calling this method. * * @return H2 connection */ protected Connection getH2Connection() { Assert.assertNotNull(_h2Connection, "H2 Connection has not been initialized"); return _h2Connection; } /** * Get the query generator. Query generator must be set up before calling this method. * * @return Query generator. */ protected QueryGenerator getQueryGenerator() { Assert.assertNotNull(_queryGenerator, "Query Generator has not been initialized"); return _queryGenerator; } /** * Sets up the H2 connection */ protected void setUpH2Connection() throws Exception { Assert.assertNull(_h2Connection); Class.forName("org.h2.Driver"); _h2Connection = DriverManager.getConnection("jdbc:h2:mem:"); } /** * Sets up the H2 connection to a table with pre-loaded data. 
*/ protected void setUpH2Connection(List<File> avroFiles) throws Exception { setUpH2Connection(); ClusterIntegrationTestUtils.setUpH2TableWithAvro(avroFiles, getTableName(), _h2Connection); } /** * Sets up the query generator using the given Avro files. */ protected void setUpQueryGenerator(List<File> avroFiles) { Assert.assertNull(_queryGenerator); String tableName = getTableName(); _queryGenerator = new QueryGenerator(avroFiles, tableName, tableName); } protected List<File> unpackAvroData(File outputDir) throws Exception { return unpackTarData(getAvroTarFileName(), outputDir); } /** * Unpack the tarred data into the given directory. * * @param tarFileName Input tar filename * @param outputDir Output directory * @return List of files unpacked. * @throws Exception */ protected List<File> unpackTarData(String tarFileName, File outputDir) throws Exception { InputStream inputStream = getClass().getClassLoader().getResourceAsStream(tarFileName); Assert.assertNotNull(inputStream); return TarCompressionUtils.untar(inputStream, outputDir); } protected void pushAvroIntoKafka(List<File> avroFiles) throws Exception { ClusterIntegrationTestUtils.pushAvroIntoKafka(avroFiles, "localhost:" + getKafkaPort(), getKafkaTopic(), getMaxNumKafkaMessagesPerBatch(), getKafkaMessageHeader(), getPartitionColumn(), injectTombstones()); } protected void pushCsvIntoKafka(File csvFile, String kafkaTopic, @Nullable Integer partitionColumnIndex) throws Exception { ClusterIntegrationTestUtils.pushCsvIntoKafka(csvFile, "localhost:" + getKafkaPort(), kafkaTopic, partitionColumnIndex, injectTombstones()); } protected void pushCsvIntoKafka(List<String> csvRecords, String kafkaTopic, @Nullable Integer partitionColumnIndex) throws Exception { ClusterIntegrationTestUtils.pushCsvIntoKafka(csvRecords, "localhost:" + getKafkaPort(), kafkaTopic, partitionColumnIndex, injectTombstones()); } protected boolean injectTombstones() { return false; } protected void createAndUploadSegmentFromClasspath(TableConfig 
tableConfig, Schema schema, String dataFilePath, FileFormat fileFormat, long expectedNoOfDocs, long timeoutMs) throws Exception { URL dataPathUrl = getClass().getClassLoader().getResource(dataFilePath); assert dataPathUrl != null; File file = new File(dataPathUrl.getFile()); createAndUploadSegmentFromFile(tableConfig, schema, file, fileFormat, expectedNoOfDocs, timeoutMs); } /// @deprecated use createAndUploadSegmentFromClasspath instead, given what this class does is to look for /// dataFilePath on the classpath @Deprecated protected void createAndUploadSegmentFromFile(TableConfig tableConfig, Schema schema, String dataFilePath, FileFormat fileFormat, long expectedNoOfDocs, long timeoutMs) throws Exception { createAndUploadSegmentFromClasspath(tableConfig, schema, dataFilePath, fileFormat, expectedNoOfDocs, timeoutMs); } protected void createAndUploadSegmentFromFile(TableConfig tableConfig, Schema schema, File file, FileFormat fileFormat, long expectedNoOfDocs, long timeoutMs) throws Exception { TestUtils.ensureDirectoriesExistAndEmpty(_segmentDir, _tarDir); ClusterIntegrationTestUtils.buildSegmentFromFile(file, tableConfig, schema, "%", _segmentDir, _tarDir, fileFormat); uploadSegments(tableConfig.getTableName(), _tarDir); TestUtils.waitForCondition(() -> getCurrentCountStarResult(tableConfig.getTableName()) == expectedNoOfDocs, 100L, timeoutMs, "Failed to load " + expectedNoOfDocs + " documents in table " + tableConfig.getTableName(), true, Duration.ofMillis(timeoutMs / 10)); } protected List<File> getAllAvroFiles() throws Exception { // Unpack the Avro files int numSegments = unpackAvroData(_tempDir).size(); // Avro files has to be ordered as time series data List<File> avroFiles = new ArrayList<>(numSegments); for (int i = 1; i <= numSegments; i++) { avroFiles.add(new File(_tempDir, "On_Time_On_Time_Performance_2014_" + i + ".avro")); } return avroFiles; } protected List<File> getOfflineAvroFiles(List<File> avroFiles, int numOfflineSegments) { List<File> 
offlineAvroFiles = new ArrayList<>(numOfflineSegments); for (int i = 0; i < numOfflineSegments; i++) { offlineAvroFiles.add(avroFiles.get(i)); } return offlineAvroFiles; } protected List<File> getRealtimeAvroFiles(List<File> avroFiles, int numRealtimeSegments) { int numSegments = avroFiles.size(); List<File> realtimeAvroFiles = new ArrayList<>(numRealtimeSegments); for (int i = numSegments - numRealtimeSegments; i < numSegments; i++) { realtimeAvroFiles.add(avroFiles.get(i)); } return realtimeAvroFiles; } protected void startKafka() { startKafkaWithoutTopic(); createKafkaTopic(getKafkaTopic()); } protected void startKafkaWithoutTopic() { startKafkaWithoutTopic(KafkaStarterUtils.DEFAULT_KAFKA_PORT); } protected void startKafkaWithoutTopic(int port) { _kafkaStarters = KafkaStarterUtils.startServers(getNumKafkaBrokers(), port, getKafkaZKAddress(), KafkaStarterUtils.getDefaultKafkaConfiguration()); } protected void createKafkaTopic(String topic) { _kafkaStarters.get(0).createTopic(topic, KafkaStarterUtils.getTopicCreationProps(getNumKafkaPartitions())); } protected void stopKafka() { for (StreamDataServerStartable kafkaStarter : _kafkaStarters) { kafkaStarter.stop(); } } /** * Get current result for "SELECT COUNT(*)". 
* * @return Current count start result */ protected long getCurrentCountStarResult() { return getCurrentCountStarResult(getTableName()); } protected long getCurrentCountStarResult(String tableName) { ResultSetGroup resultSetGroup = getPinotConnection().execute("SELECT COUNT(*) FROM " + tableName); if (resultSetGroup.getResultSetCount() > 0) { return resultSetGroup.getResultSet(0).getLong(0); } return 0; } protected void waitForMinionTaskCompletion(String taskId, long timeout) { TestUtils.waitForCondition(aVoid -> _controllerStarter.getHelixTaskResourceManager().getTaskState(taskId) == TaskState.COMPLETED, timeout, "Failed to complete the task " + taskId); // Validate that there were > 0 subtasks so that we know the task was actually run Assert.assertFalse(_controllerStarter.getHelixTaskResourceManager().getSubtaskStates(taskId).isEmpty()); // Validate that all subtasks are completed successfully. A task can be marked completed even if some subtasks // failed, so we need to check the subtask states. Map<String, TaskPartitionState> subTaskStates = _controllerStarter.getHelixTaskResourceManager() .getSubtaskStates(taskId); Assert.assertTrue(subTaskStates.values().stream().allMatch(x -> x == TaskPartitionState.COMPLETED), "Not all subtasks are completed for task " + taskId + " : " + subTaskStates); } protected List<String> getSegments(String tableNameWithType) { return _controllerStarter.getHelixResourceManager().getSegmentsFor(tableNameWithType, false); } protected int getSegmentCount(String tableNameWithType) { return getSegments(tableNameWithType).size(); } /** * Wait for all documents to get loaded. 
* * @param timeoutMs Timeout in milliseconds * @throws Exception */ protected void waitForAllDocsLoaded(long timeoutMs) throws Exception { waitForDocsLoaded(timeoutMs, true, getTableName()); } protected void waitForDocsLoaded(long timeoutMs, boolean raiseError, String tableName) { long countStarResult = getCountStarResult(); TestUtils.waitForCondition(() -> getCurrentCountStarResult(tableName) == countStarResult, 100L, timeoutMs, "Failed to load " + countStarResult + " documents", raiseError, Duration.ofMillis(timeoutMs / 10)); } /** * Wait for servers to remove the table data manager after the table is deleted. */ protected void waitForTableDataManagerRemoved(String tableNameWithType) { TestUtils.waitForCondition(aVoid -> { for (BaseServerStarter serverStarter : _serverStarters) { if (serverStarter.getServerInstance().getInstanceDataManager().getTableDataManager(tableNameWithType) != null) { return false; } } return true; }, 60_000L, "Failed to remove table data manager for table: " + tableNameWithType); } /** * Reset table utils. */ protected void resetTable(String tableName, TableType tableType, @Nullable String targetInstance) throws IOException { getControllerRequestClient().resetTable(TableNameBuilder.forType(tableType).tableNameWithType(tableName), targetInstance); } /** * Run equivalent Pinot and H2 query and compare the results. */ protected void testQuery(@Language("sql") String query) throws Exception { testQuery(query, query); } /** * Run equivalent Pinot and H2 query and compare the results. */ protected void testQuery(@Language("sql") String pinotQuery, @Language("sql") String h2Query) throws Exception { ClusterIntegrationTestUtils.testQuery(pinotQuery, getBrokerBaseApiUrl(), getPinotConnection(), h2Query, getH2Connection(), null, getExtraQueryProperties(), useMultiStageQueryEngine()); } /** * Run equivalent Pinot and H2 query and compare the results. 
*/ protected void testQueryWithMatchingRowCount(@Language("sql") String pinotQuery, @Language("sql") String h2Query) throws Exception { ClusterIntegrationTestUtils.testQueryWithMatchingRowCount(pinotQuery, getBrokerBaseApiUrl(), getPinotConnection(), h2Query, getH2Connection(), null, getExtraQueryProperties(), useMultiStageQueryEngine()); } protected String getType(JsonNode jsonNode, int colIndex) { return jsonNode.get("resultTable").get("dataSchema").get("columnDataTypes").get(colIndex).asText(); } protected <T> T getCellValue(JsonNode jsonNode, int colIndex, int rowIndex, Function<JsonNode, T> extract) { JsonNode cellResult = jsonNode.get("resultTable").get("rows").get(rowIndex).get(colIndex); return extract.apply(cellResult); } protected long getLongCellValue(JsonNode jsonNode, int colIndex, int rowIndex) { return getCellValue(jsonNode, colIndex, rowIndex, JsonNode::asLong).longValue(); } protected JsonNode getColumnIndexSize(String column) throws Exception { return JsonUtils.stringToJsonNode( sendGetRequest(_controllerRequestURLBuilder.forTableAggregateMetadata(getTableName(), List.of(column)))) .get("columnIndexSizeMap").get(column); } /** * Get all segment names for a given tableName and tableType. */ protected List<String> getSegmentNames(String tableName, @Nullable String tableType) throws Exception { return getControllerRequestClient().listSegments(tableName, tableType, true); } protected List<ValidDocIdsMetadataInfo> getValidDocIdsMetadata(String tableNameWithType, ValidDocIdsType validDocIdsType) throws Exception { StringBuilder urlBuilder = new StringBuilder( _controllerRequestURLBuilder.forValidDocIdsMetadata(tableNameWithType, validDocIdsType.toString())); String responseString = sendGetRequest(urlBuilder.toString()); return JsonUtils.stringToObject(responseString, new TypeReference<>() { }); } }
openjdk/jdk8
34,238
jdk/test/java/awt/Focus/FocusTraversalPolicy/LayoutFTPTest.java
/* * Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* @test @bug 6463545 @summary Tests javax.swing.LayoutFocusTraversalPolicy functionality. @author anton.tarasov, oleg.sukhodolsky: area=awt.focus @library ../../regtesthelpers @build AbstractPolicyTest @run main LayoutFTPTest */ import java.awt.*; import javax.swing.*; import java.awt.event.*; import java.util.*; import test.java.awt.regtesthelpers.AbstractPolicyTest; /* Below are some notes about changes in SortingFocusTraversalPolicy behaviour. container(root) [...] - focus traversal cycle with the <container> as the root. container(provider) [...] - focus traversal cycle with the <container> as the provider. container(..)(focusable) [...] - <container> is implicitly set focusable. comp[unfocusable] - <comp> is set unfocusable. 1. frame [ container(root)(focusable) [...] ] - getComponentAfter(<frame>, <container>) returns <container>. If <container> is the default component to focus in its own cycle. * NO CHANGE * 3. 
frame [ comp1 container(root)(focusable) [ comp2 ] comp3 ] - getComponentBefore(<frame>, <comp3>) returns <comp2>. ** BEHAVIOUR CHANGE ** Previously <container> would be returned. This was a bug as it wasn't according to the spec. - getComponentBefore(<container>, <comp2>) returns <container>. * NO CHANGE * - getComponentBefore(<frame>, <container>) returns <comp1>. * NO CHANGE * - getComponentBefore(<container>, <container>) returns <comp2>. * NO CHANGE * 4. frame [ container(provider) [...] comp ] - getComponentAfter(<frame>, <container>) returns <container>'s default. ** BEHAVIOUR CHANGE. SPEC ADDITION ** Previously <comp> would be returned. Not specified in the spec. - getComponentBefore(<frame>, <comp>) returns <container>'s last. ** SPEC CHANGE ** The spec says (incorrectly) that default should be returned. 5. frame [ container(provider)(focusable) [...] comp2 ] - getComponentBefore(<frame>, <comp2>) returns <container>'s last. ** BEHAVIOUR CHANGE. SPEC ADDITION ** Previously <container> would be returned. Not specified in the spec. 6. frame [ comp1 container(root) [...] comp2 ] - getComponentAfter(<frame>, <comp1>) returns <container>'s default. ** BEHAVIOUR CHANGE. SPEC ADDITION ** Previously <comp2> would be returned. It's just the fix for 6240842. Not specified in the spec. 7. frame [ comp1 container(root) [...] comp2(unfocusable) comp3 ] - getComponentBefore(<frame>, <comp3>) returns <container>'s default. ** BEHAVIOUR CHANGE ** Previously <comp1> would be returned. This was a bug, because in case if <comp2> is focusable getComponentBefore(<frame>, <comp2>) would return <container>'s default. 
*/ public class LayoutFTPTest { final int TESTS_NUMBER = 11; public static void main(String[] args) { LayoutFTPTest app = new LayoutFTPTest(); app.start(); } public void start() { try { Class clazz = null; AbstractPolicyTest test = null; for (int i = 1; i <= TESTS_NUMBER; i++) { clazz = Class.forName("PolicyTest" + i); if (clazz != null) { test = (AbstractPolicyTest)clazz.newInstance(); System.out.print("Test " + i + " is in progress..."); test.testIt(); System.out.println(" passed."); } } } catch (RuntimeException rte) { throw rte; } catch (Exception e) { throw new RuntimeException("Error: unexpected exception cought!", e); } } } /* * frame [ container1 [...] container2 [...] container3 [...] ] * - verifies simple configuration. */ class PolicyTest1 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new GridLayout(3, 1)); for (int i = 0; i < 3; i++) { Container cont = (Container) registerComponent("jpanel" + i, new JPanel()); for (int j = 0; j < 3; j++) { cont.add(registerComponent("btn " + (j + i*100), new JButton("jbutton"))); } jframe.add(cont); } return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn 0", "btn 1"); order.put("btn 1", "btn 2"); order.put("btn 2", "btn 100"); order.put("btn 100", "btn 101"); order.put("btn 101", "btn 102"); order.put("btn 102", "btn 200"); order.put("btn 200", "btn 201"); order.put("btn 201", "btn 202"); order.put("btn 202", "btn 0"); order.put("jpanel0", "btn 0"); order.put("jpanel1", "btn 100"); order.put("jpanel2", "btn 200"); order.put("jframe", "btn 0"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn 0", "btn 
202"); order.put("btn 1", "btn 0"); order.put("btn 2", "btn 1"); order.put("btn 100", "btn 2"); order.put("btn 101", "btn 100"); order.put("btn 102", "btn 101"); order.put("btn 200", "btn 102"); order.put("btn 201", "btn 200"); order.put("btn 202", "btn 201"); order.put("jpanel0", "btn 202"); order.put("jpanel1", "btn 2"); order.put("jpanel2", "btn 102"); order.put("jframe", "btn 202"); return order; } protected String[] getContainersToTest() { return new String[] {"jframe"}; } protected String getDefaultComp(String focusCycleRoot_id) { return "btn 0"; } protected String getFirstComp(String focusCycleRoot_id) { return "btn 0"; } protected String getLastComp(String focusCycleRoot_id) { return "btn 202"; } } /* * frame [ comp container(provider) [...] comp ] * - transfering focus through a provider. */ class PolicyTest2 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); jframe.add(registerComponent("btn 1", new JButton("jbutton"))); Container cont = (Container)registerComponent("jpanel", new JPanel()); cont.add(registerComponent("btn 2", new JButton("jbutton"))); cont.add(registerComponent("btn 3", new JButton("jbutton"))); jframe.add(cont); jframe.add(registerComponent("btn 4", new JButton("jbutton"))); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); ((Container)getComponent("jpanel")).setFocusTraversalPolicyProvider(true); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("jframe", "btn 1"); order.put("btn 1", "btn 2"); order.put("btn 2", "btn 3"); order.put("btn 3", "btn 4"); order.put("btn 4", "btn 1"); order.put("jpanel", "btn 2"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); 
order.put("btn 4", "btn 3"); order.put("btn 3", "btn 2"); order.put("btn 2", "btn 1"); order.put("btn 1", "btn 4"); return order; } protected String[] getContainersToTest() { return new String[] {"jframe", "jpanel"}; } protected String getDefaultComp(String focusCycleRoot_id) { if ("jframe".equals(focusCycleRoot_id)) { return "btn 1"; } else if ("jpanel".equals(focusCycleRoot_id)) { return "btn 2"; } return null; } protected String getFirstComp(String focusCycleRoot_id) { return getDefaultComp(focusCycleRoot_id); } protected String getLastComp(String focusCycleRoot_id) { if ("jframe".equals(focusCycleRoot_id)) { return "btn 4"; } else if ("jpanel".equals(focusCycleRoot_id)) { return "btn 3"; } return null; } } /* * frame [ comp container(root) [...] comp ] * - transfering focus through a root (includes the case reported in the CR 6240842). */ class PolicyTest3 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); jframe.add(registerComponent("btn 1", new JButton("jbutton"))); Container cont = (Container)registerComponent("jpanel", new JPanel()); cont.add(registerComponent("btn 2", new JButton("jbutton"))); cont.add(registerComponent("btn 3", new JButton("jbutton"))); jframe.add(cont); jframe.add(registerComponent("btn 4", new JButton("jbutton"))); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); ((Container)getComponent("jpanel")).setFocusCycleRoot(true); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("jframe", "btn 1"); order.put("btn 1", "btn 2"); order.put("btn 2", "btn 3"); order.put("btn 3", "btn 2"); order.put("btn 4", "btn 1"); order.put("jpanel", "btn 2"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new 
HashMap<String, String>(); order.put("btn 4", "btn 2"); order.put("btn 3", "btn 2"); order.put("btn 2", "btn 3"); order.put("btn 1", "btn 4"); return order; } protected String[] getContainersToTest() { return new String[] {"jframe", "jpanel"}; } protected String getDefaultComp(String focusCycleRoot_id) { if ("jframe".equals(focusCycleRoot_id)) { return "btn 1"; } else if ("jpanel".equals(focusCycleRoot_id)) { return "btn 2"; } return null; } protected String getFirstComp(String focusCycleRoot_id) { return getDefaultComp(focusCycleRoot_id); } protected String getLastComp(String focusCycleRoot_id) { if ("jframe".equals(focusCycleRoot_id)) { return "btn 4"; } else if ("jpanel".equals(focusCycleRoot_id)) { return "btn 3"; } return null; } } /* * frame [ container(provider) [...] comp1(unfocusable) comp2 ] * - getComponentBefore(<frame>, <comp2>) should return <container>'s last. */ class PolicyTest4 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); Container cont = (Container)registerComponent("jpanel", new JPanel()); cont.add(registerComponent("btn 1", new JButton("jbutton"))); cont.add(registerComponent("btn 2", new JButton("jbutton"))); jframe.add(cont); jframe.add(registerComponent("btn 3", new JButton("jbutton"))); jframe.add(registerComponent("btn 4", new JButton("jbutton"))); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); ((Container)getComponent("jpanel")).setFocusTraversalPolicyProvider(true); ((JButton)getComponent("btn 3")).setFocusable(false); } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn 4", "btn 2"); order.put("btn 2", "btn 1"); order.put("btn 1", "btn 4"); return order; } // no testing protected Map<String, String> getForwardOrder() { return null; } 
protected String[] getContainersToTest() { return null; } protected String getDefaultComp(String focusCycleRoot_id) { return null; } protected String getFirstComp(String focusCycleRoot_id) { return null; } protected String getLastComp(String focusCycleRoot_id) { return null; } } /* * frame [ container(root) [...] comp1(unfocusable) comp2 ] * - getComponentBefore(<frame>, <comp2>) should return <container>'s default. */ class PolicyTest5 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); Container cont = (Container)registerComponent("jpanel", new JPanel()); cont.add(registerComponent("btn 1", new JButton("jbutton"))); cont.add(registerComponent("btn 2", new JButton("jbutton"))); jframe.add(cont); jframe.add(registerComponent("btn 3", new JButton("jbutton"))); jframe.add(registerComponent("btn 4", new JButton("jbutton"))); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); ((Container)getComponent("jpanel")).setFocusCycleRoot(true); ((JButton)getComponent("btn 3")).setFocusable(false); } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn 4", "btn 1"); order.put("btn 2", "btn 1"); order.put("btn 1", "btn 2"); return order; } // no testing protected Map<String, String> getForwardOrder() { return null; } protected String[] getContainersToTest() { return null; } protected String getDefaultComp(String focusCycleRoot_id) { return null; } protected String getFirstComp(String focusCycleRoot_id) { return null; } protected String getLastComp(String focusCycleRoot_id) { return null; } } /* * frame [ comp container(provider)(focusable) [...] comp ] * - transfering focus through a focusable provider. 
*/ class PolicyTest6 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); jframe.add(registerComponent("btn 1", new JButton("jbutton"))); Container cont = (Container)registerComponent("jpanel", new JPanel()); cont.add(registerComponent("btn 2", new JButton("jbutton"))); cont.add(registerComponent("btn 3", new JButton("jbutton"))); jframe.add(cont); jframe.add(registerComponent("btn 4", new JButton("jbutton"))); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); ((Container)getComponent("jpanel")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() { public Component getDefaultComponent(Container aContainer) { return getComponent("btn 2"); } }); ((Container)getComponent("jpanel")).setFocusTraversalPolicyProvider(true); ((Container)getComponent("jpanel")).setFocusable(true); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("jframe", "btn 1"); order.put("btn 1", "jpanel"); order.put("btn 2", "btn 3"); order.put("btn 3", "btn 4"); order.put("btn 4", "btn 1"); order.put("jpanel", "btn 2"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn 4", "btn 3"); order.put("btn 3", "btn 2"); order.put("btn 2", "jpanel"); order.put("btn 1", "btn 4"); order.put("jpanel", "btn 1"); return order; } protected String[] getContainersToTest() { return new String[] {"jpanel"}; } protected String getDefaultComp(String focusCycleRoot_id) { return "btn 2"; } protected String getFirstComp(String focusCycleRoot_id) { return "jpanel"; } protected String getLastComp(String focusCycleRoot_id) { return "btn 3"; } } /* * frame [ comp container(root)(focusable) [...] 
comp ] * - transfering focus through a focusable root. */ class PolicyTest7 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); jframe.add(registerComponent("btn 1", new JButton("jbutton"))); Container cont = (Container)registerComponent("jpanel", new JPanel()); cont.add(registerComponent("btn 2", new JButton("jbutton"))); cont.add(registerComponent("btn 3", new JButton("jbutton"))); jframe.add(cont); jframe.add(registerComponent("btn 4", new JButton("jbutton"))); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); ((Container)getComponent("jpanel")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() { public Component getDefaultComponent(Container aContainer) { return getComponent("btn 2"); } }); ((Container)getComponent("jpanel")).setFocusCycleRoot(true); ((Container)getComponent("jpanel")).setFocusable(true); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("jframe", "btn 1"); order.put("btn 1", "jpanel"); order.put("btn 2", "btn 3"); order.put("btn 3", "jpanel"); order.put("btn 4", "btn 1"); order.put("jpanel", "btn 2"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn 4", "btn 2"); order.put("btn 3", "btn 2"); order.put("btn 2", "jpanel"); order.put("btn 1", "btn 4"); order.put("jpanel", "btn 1"); return order; } protected String[] getContainersToTest() { return new String[] {"jpanel"}; } protected String getDefaultComp(String focusCycleRoot_id) { return "btn 2"; } protected String getFirstComp(String focusCycleRoot_id) { return "jpanel"; } protected String getLastComp(String focusCycleRoot_id) { return "btn 3"; } } /* * frame [ comp1 comp2 container1(provider) [...] 
container2(root) [...] ] * - verifies a case when a provider is followed by a root. */ class PolicyTest8 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("frame", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); jframe.add(registerComponent("btn-1", new JButton("jbutton"))); jframe.add(registerComponent("btn-2", new JButton("jbutton"))); Container cont1 = (Container)registerComponent("panel-1", new JPanel()); cont1.add(registerComponent("btn-3", new JButton("jbutton"))); cont1.add(registerComponent("btn-4", new JButton("jbutton"))); cont1.add(registerComponent("btn-5", new JButton("jbutton"))); Container cont2 = (Container)registerComponent("panel-2", new JPanel()); cont2.add(registerComponent("btn-6", new JButton("jbutton"))); cont2.add(registerComponent("btn-7", new JButton("jbutton"))); cont2.add(registerComponent("btn-8", new JButton("jbutton"))); jframe.add(cont1); jframe.add(cont2); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("panel-1")).setFocusTraversalPolicyProvider(true); ((Container)getComponent("panel-1")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() { public Component getDefaultComponent(Container aContainer) { return getComponent("btn-4"); } }); ((Container)getComponent("panel-2")).setFocusCycleRoot(true); ((Container)getComponent("panel-2")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() { public Component getDefaultComponent(Container aContainer) { return getComponent("btn-7"); } }); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("frame", "btn-1"); order.put("btn-1", "btn-2"); order.put("btn-2", "btn-4"); order.put("btn-3", "btn-4"); order.put("btn-4", "btn-5"); order.put("btn-5", "btn-7"); order.put("btn-6", "btn-7"); order.put("btn-7", "btn-8"); order.put("btn-8", "btn-6"); order.put("panel-1", "btn-4"); order.put("panel-2", "btn-7"); return order; } 
protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn-1", "btn-5"); order.put("btn-2", "btn-1"); order.put("btn-3", "btn-2"); order.put("btn-4", "btn-3"); order.put("btn-5", "btn-4"); order.put("btn-6", "btn-8"); order.put("btn-7", "btn-6"); order.put("btn-8", "btn-7"); return order; } protected String[] getContainersToTest() { return new String[] {"frame", "panel-1", "panel-2"}; } protected String getDefaultComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-1"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "btn-4"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-7"; } return null; } protected String getFirstComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-1"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "btn-3"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-6"; } return null; } protected String getLastComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-5"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "btn-5"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-8"; } return null; } } /* * frame [ comp1 comp2 container1(root) [...] container2(provider) [...] ] * - verifies a case when a root is followed by a provider. 
*/ class PolicyTest9 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("frame", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); jframe.add(registerComponent("btn-1", new JButton("jbutton"))); jframe.add(registerComponent("btn-2", new JButton("jbutton"))); Container cont1 = (Container)registerComponent("panel-1", new JPanel()); cont1.add(registerComponent("btn-3", new JButton("jbutton"))); cont1.add(registerComponent("btn-4", new JButton("jbutton"))); cont1.add(registerComponent("btn-5", new JButton("jbutton"))); Container cont2 = (Container)registerComponent("panel-2", new JPanel()); cont2.add(registerComponent("btn-6", new JButton("jbutton"))); cont2.add(registerComponent("btn-7", new JButton("jbutton"))); cont2.add(registerComponent("btn-8", new JButton("jbutton"))); jframe.add(cont1); jframe.add(cont2); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("panel-1")).setFocusCycleRoot(true); ((Container)getComponent("panel-1")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() { public Component getDefaultComponent(Container aContainer) { return getComponent("btn-4"); } }); ((Container)getComponent("panel-2")).setFocusTraversalPolicyProvider(true); ((Container)getComponent("panel-2")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() { public Component getDefaultComponent(Container aContainer) { return getComponent("btn-7"); } }); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("frame", "btn-1"); order.put("btn-1", "btn-2"); order.put("btn-2", "btn-4"); order.put("btn-3", "btn-4"); order.put("btn-4", "btn-5"); order.put("btn-5", "btn-3"); order.put("btn-6", "btn-7"); order.put("btn-7", "btn-8"); order.put("btn-8", "btn-1"); order.put("panel-1", "btn-4"); order.put("panel-2", "btn-7"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new 
HashMap<String, String>(); order.put("btn-1", "btn-8"); order.put("btn-2", "btn-1"); order.put("btn-3", "btn-5"); order.put("btn-4", "btn-3"); order.put("btn-5", "btn-4"); order.put("btn-6", "btn-4"); order.put("btn-7", "btn-6"); order.put("btn-8", "btn-7"); return order; } protected String[] getContainersToTest() { return new String[] {"frame", "panel-1", "panel-2"}; } protected String getDefaultComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-1"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "btn-4"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-7"; } return null; } protected String getFirstComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-1"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "btn-3"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-6"; } return null; } protected String getLastComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-8"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "btn-5"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-8"; } return null; } } /* * frame [ container0 [...] container1(root) [ comp1 comp2 container2(provider) [...] ] ] * - verifies a case when a provider is nested in a root. 
*/ class PolicyTest10 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("frame", new JFrame("Test Frame")); jframe.setLayout(new GridLayout(2, 1)); Container cont0 = new JPanel(); cont0.add(registerComponent("btn-1", new JButton("jbutton"))); cont0.add(registerComponent("btn-2", new JButton("jbutton"))); Container cont1 = (Container)registerComponent("panel-1", new JPanel()); cont1.add(registerComponent("btn-3", new JButton("jbutton"))); cont1.add(registerComponent("btn-4", new JButton("jbutton"))); Container cont2 = (Container)registerComponent("panel-2", new JPanel()); cont2.add(registerComponent("btn-5", new JButton("jbutton"))); cont2.add(registerComponent("btn-6", new JButton("jbutton"))); cont1.add(cont2); jframe.add(cont0); jframe.add(cont1); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("panel-1")).setFocusCycleRoot(true); ((Container)getComponent("panel-1")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() { public Component getDefaultComponent(Container aContainer) { return getComponent("panel-2"); } }); ((Container)getComponent("panel-2")).setFocusTraversalPolicyProvider(true); ((Container)getComponent("panel-2")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("frame", "btn-1"); order.put("btn-1", "btn-2"); order.put("btn-2", "panel-2"); order.put("btn-3", "btn-4"); order.put("btn-4", "btn-5"); order.put("btn-5", "btn-6"); order.put("btn-6", "btn-3"); order.put("panel-1", "panel-2"); order.put("panel-2", "btn-5"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("btn-1", "btn-2"); order.put("btn-2", "btn-1"); order.put("btn-3", "btn-6"); order.put("btn-4", "btn-3"); order.put("btn-5", "btn-4"); order.put("btn-6", "btn-5"); return order; } 
protected String[] getContainersToTest() { return new String[] {"frame", "panel-1", "panel-2"}; } protected String getDefaultComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-1"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "panel-2"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-5"; } return null; } protected String getFirstComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-1"; } else if ("panel-1".equals(focusCycleRoot_id)) { return "btn-3"; } else if ("panel-2".equals(focusCycleRoot_id)) { return "btn-5"; } return null; } protected String getLastComp(String focusCycleRoot_id) { if ("frame".equals(focusCycleRoot_id)) { return "btn-2"; } else { return "btn-6"; } } } /* * frame [ container(root) [...] comp ] * - getDefaultComponent(<frame>) should implicitly down-cycle into the <container>. * - getFirstComponent(<frame>) should implicitly down-cycle into the <container>. */ class PolicyTest11 extends AbstractPolicyTest { protected Frame createFrame() { JFrame jframe = (JFrame) registerComponent("jframe", new JFrame("Test Frame")); jframe.setLayout(new FlowLayout()); Container cont = (Container)registerComponent("jpanel", new JPanel()); cont.add(registerComponent("btn-1", new JButton("jbutton"))); cont.add(registerComponent("btn-2", new JButton("jbutton"))); jframe.add(cont); jframe.add(registerComponent("btn-3", new JButton("jbutton"))); return jframe; } protected void customizeHierarchy() { ((Container)getComponent("jframe")).setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); ((Container)getComponent("jpanel")).setFocusCycleRoot(true); } protected Map<String, String> getForwardOrder() { Map<String, String> order = new HashMap<String, String>(); order.put("jframe", "btn-1"); order.put("btn-1", "btn-2"); order.put("btn-2", "btn-1"); order.put("btn-3", "btn-1"); return order; } protected Map<String, String> getBackwardOrder() { Map<String, String> order = new 
HashMap<String, String>(); order.put("btn-3", "btn-1"); order.put("btn-2", "btn-1"); order.put("btn-1", "btn-2"); order.put("jframe", "btn-3"); return order; } protected String[] getContainersToTest() { return new String[] {"jframe"}; } protected String getDefaultComp(String focusCycleRoot_id) { return "btn-1"; } protected String getFirstComp(String focusCycleRoot_id) { return "btn-1"; } protected String getLastComp(String focusCycleRoot_id) { return "btn-3"; } }
oracle/coherence
35,020
prj/coherence-core/src/main/java/com/tangosol/io/MultiBufferReadBuffer.java
/* * Copyright (c) 2000, 2024, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ package com.tangosol.io; import com.tangosol.io.nio.ByteBufferOutputStream; import com.tangosol.util.Binary; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import static com.oracle.coherence.common.base.Exceptions.ensureRuntimeException; /** * The MultiBufferReadBuffer is a ReadBuffer implementation that presents a * view across any number of underlying ReadBuffer objects, as if they were * appended end-to-end into a single ReadBuffer. * * @author cp 2006.04.15 */ public class MultiBufferReadBuffer extends AbstractReadBuffer { // ----- constructors --------------------------------------------------- /** * Construct a MultiBufferReadBuffer from an array of underlying * ReadBuffer objects. * * @param abuf an array of ReadBuffer objects from which to construct * this MultiBufferReadBuffer */ public MultiBufferReadBuffer(ReadBuffer[] abuf) { abuf = abuf.clone(); int cBuffers = abuf.length; int[] aof = new int[cBuffers]; int cb = 0; for (int i = 0; i < cBuffers; ++i) { aof[i] = cb; int cbBuf = abuf[i].length(); if (cb + cbBuf < cb) { // integer overflow throw new IllegalArgumentException("cumulative buffer length exceeds 2GB"); } cb += cbBuf; } m_abuf = abuf; m_aofBuffer = aof; m_ofStart = 0; m_ofEnd = cb; } /** * Construct a MultiBufferReadBuffer from its constituent members. This * is a package-private constructor intended for use by the * MultiBufferWriteBuffer and the MultiBufferReadBuffer itself. Note that * this implementation holds onto the passed array references. 
* * @param abuf an array of underlying ReadBuffer objects containing * the data that this MultiBufferReadBuffer represents * @param aofBuffer the absolute offset of the first byte of each * ReadBuffer passed in <tt>abuf</tt> * @param ofStart the absolute offset into the virtual ReadBuffer that * corresponds to the zero offset of this * MultiBufferReadBuffer * @param ofEnd the absolute offset into the virtual ReadBuffer that * corresponds to the first byte beyond the bounds of * this MultiBufferReadBuffer */ MultiBufferReadBuffer(ReadBuffer[] abuf, int[] aofBuffer, int ofStart, int ofEnd) { m_abuf = abuf; m_aofBuffer = aofBuffer; m_ofStart = ofStart; m_ofEnd = ofEnd; } // ----- MultiBufferReadBuffer interface -------------------------------- /** * Return a self-destructing BufferInput over this Buffer. * * As the BufferInput is advanced the individual buffer segments will be * released allowing them to potentially be garbage collected. * * @return a destructed BufferInput */ public BufferInput getDestructiveBufferInput() { return instantiateBufferInput(/*fDestructive*/ true); } // ----- ReadBuffer interface ------------------------------------------- /** * {@inheritDoc} */ public void writeTo(OutputStream out) throws IOException { writeTo((DataOutput) new DataOutputStream(out)); } /** * {@inheritDoc} */ public void writeTo(OutputStream out, int of, int cb) throws IOException { writeTo((DataOutput) new DataOutputStream(out), of, cb); } /** * {@inheritDoc} */ public void writeTo(DataOutput out) throws IOException { writeTo(out, 0, length()); } /** * {@inheritDoc} */ public void writeTo(DataOutput out, int of, int cb) throws IOException { if (length() == 0 || cb == 0) { // nop return; } int iBufFirst = getBufferIndexByOffset(of); int iBufLast = getBufferIndexByOffset(of + cb); for (int iBuf = iBufFirst; iBuf <= iBufLast; iBuf++) { ReadBuffer buf = getBuffer(iBuf); int cbBuf = buf.length(); if (iBuf == iBufFirst) { int ofSrc = getBufferOffset(iBuf); // ofSrc <= of buf = 
buf.getReadBuffer(of - ofSrc, cbBuf + ofSrc); cbBuf += ofSrc; } else if (iBuf == iBufLast) { buf = buf.getReadBuffer(0, cb); } buf.writeTo(out); cb -= cbBuf; } } /** * {@inheritDoc} */ public void writeTo(ByteBuffer buf) { try { writeTo(new ByteBufferOutputStream(buf)); } catch (IOException e) { throw ensureRuntimeException(e); } } /** * {@inheritDoc} */ public void writeTo(ByteBuffer buf, int of, int cb) throws IOException { writeTo(new ByteBufferOutputStream(buf), of, cb); } /** * {@inheritDoc} */ public int length() { return m_ofEnd - m_ofStart; } /** * {@inheritDoc} */ public byte byteAt(int of) { checkBounds(of, 1); int iBuf = getBufferIndexByOffset(of); return getBuffer(iBuf).byteAt(of - getBufferOffset(iBuf)); } /** * {@inheritDoc} */ public void copyBytes(int ofBegin, int ofEnd, byte abDest[], int ofDest) { int cbDest = ofEnd - ofBegin; checkBounds(ofBegin, cbDest); if (ofDest < 0 || ofDest + cbDest > abDest.length) { throw new IndexOutOfBoundsException("ofDest=" + ofDest + ", abDest.length=" + abDest.length + ", bytes requested=" + cbDest); } int iBuf = getBufferIndexByOffset(ofBegin); ReadBuffer buf = getBuffer(iBuf); int ofBuf = getBufferOffset(iBuf); int ofSrc = ofBegin - ofBuf; int cbSrc = Math.min(cbDest, buf.length() - ofSrc); buf.copyBytes(ofSrc, ofSrc + cbSrc, abDest, ofDest); ofDest += cbSrc; cbDest -= cbSrc; while (cbDest > 0) { buf = getBuffer(++iBuf); cbSrc = Math.min(cbDest, buf.length()); buf.copyBytes(0, cbSrc, abDest, ofDest); ofDest += cbSrc; cbDest -= cbSrc; } } /** * {@inheritDoc} */ public byte[] toByteArray(int of, int cb) { checkBounds(of, cb); if (cb == 0) { return NO_BYTES; } int iBuf = getBufferIndexByOffset(of); return iBuf == getBufferIndexByOffset(of + cb - 1) ? 
getBuffer(iBuf).toByteArray(of - getBufferOffset(iBuf), cb) : super.toByteArray(of, cb); } /** * {@inheritDoc} */ public Binary toBinary(int of, int cb) { checkBounds(of, cb); if (cb == 0) { return NO_BINARY; } int iBuf = getBufferIndexByOffset(of); return iBuf == getBufferIndexByOffset(of + cb - 1) ? getBuffer(iBuf).toBinary(of - getBufferOffset(iBuf), cb) : super.toBinary(of, cb); } /** * {@inheritDoc} */ public ByteBuffer toByteBuffer() { return toByteBuffer(0, length()); } /** * {@inheritDoc} */ public ByteBuffer toByteBuffer(int of, int cb) { int iBuf = getBufferIndexByOffset(of); if (iBuf == getBufferIndexByOffset(of + cb - 1)) { return getBuffer(iBuf).toByteBuffer(of - getBufferOffset(iBuf), cb); } return ByteBuffer.wrap(toByteArray(of, cb)).asReadOnlyBuffer(); } // ----- Object methods ------------------------------------------------- /** * {@inheritDoc} */ public boolean equals(Object o) { if (o == this) { return true; } if (o instanceof ReadBuffer) { ReadBuffer bufThat = (ReadBuffer) o; int cbThis = length(); int cbThat = bufThat.length(); if (cbThis != cbThat) { return false; } if (cbThat == 0) { return true; } int iBufFirst = getBufferIndexByOffset(0); int iBufLast = getBufferIndexByOffset(cbThis); int of = 0; for (int iBuf = iBufFirst; iBuf <= iBufLast; iBuf++) { ReadBuffer buf = getBuffer(iBuf); int cb = buf.length(); if (iBuf == iBufFirst) { int ofSrc = getBufferOffset(iBuf); // ofSrc <= 0 buf = buf.getReadBuffer(of - ofSrc, cb + ofSrc); cb += ofSrc; } else if (iBuf == iBufLast) { buf = buf.getReadBuffer(0, cbThis - of); cb = cbThis - of; } if (!buf.equals(bufThat.getReadBuffer(of, cb))) { return false; } of += cb; } return true; } return false; } // ----- factory methods ------------------------------------------------ /** * {@inheritDoc} */ protected ReadBuffer instantiateReadBuffer(int of, int cb) { checkBounds(of, cb); if (cb == 0) { return NO_BINARY; } // calculate which underlying buffers will compose the new // MultiBufferReadBuffer int 
iBufFirst = getBufferIndexByOffset(of); int iBufLast = getBufferIndexByOffset(of + cb - 1); // adjust offset to be relative to the first buffer that will compose // the new MultiBufferReadBuffer of -= getBufferOffset(iBufFirst); // check if the new buffer could be created without the use of the // MultiBufferReadBuffer implementation (i.e. it's a "single" buffer) if (iBufFirst == iBufLast) { ReadBuffer buf = getBuffer(iBufFirst); return cb == buf.length() ? buf : buf.getReadBuffer(of, cb); } // otherwise, build the list of underlying ReadBuffers that the new // MultiBufferReadBuffer will be composed of int cBuffers = iBufLast - iBufFirst + 1; ReadBuffer[] abuf = new ReadBuffer[cBuffers]; int[] aof = new int[cBuffers]; int cbTotal = 0; for (int i = 0; i < cBuffers; ++i) { ReadBuffer buf = getBuffer(iBufFirst + i); abuf[i] = buf; aof [i] = cbTotal; cbTotal += buf.length(); } return new MultiBufferReadBuffer(abuf, aof, of, of + cb); } /** * {@inheritDoc} */ protected BufferInput instantiateBufferInput() { return instantiateBufferInput(/*fDestructive*/ false); } /** * Factory method: Instantiate a BufferInput object to read data from the * ReadBuffer. * * @param fDestructive true iff the BufferInput should self-destruct as it * is advanced * * @return a new BufferInput reading from this ReadBuffer */ protected BufferInput instantiateBufferInput(boolean fDestructive) { return new MultiBufferInput(fDestructive); } // ----- inner class: MultiBufferInput ---------------------------------- /** * An implementation of the BufferInput interface that is backed by a * series of the underlying ReadBuffer BufferInput objects. */ public final class MultiBufferInput extends AbstractBufferInput { // ----- constructors ------------------------------------------- /** * Default constructor. */ public MultiBufferInput() { this(/*fDestructive*/ false); } /** * Default constructor. 
* * @param fDestructive true iff the stream should self-destruct as it * is advanced */ public MultiBufferInput(boolean fDestructive) { m_fDestructive = fDestructive; // initialize the stream sync(); } // ----- InputStreaming methods --------------------------------- /** * {@inheritDoc} */ public int read() throws IOException { int b; BufferInput in = m_in; if (in.available() >= 1) { b = in.read(); adjustOffsetInternal(1); } else { b = super.read(); sync(); } return b; } /** * {@inheritDoc} */ public int read(byte ab[], int of, int cb) throws IOException { int cbActual; BufferInput in = m_in; if (in.available() >= cb) { cbActual = in.read(ab, of, cb); assert cbActual == cb; adjustOffsetInternal(cbActual); } else { cbActual = super.read(ab, of, cb); sync(); } return cbActual; } /** * {@inheritDoc} */ public void reset() throws IOException { int ofMark = getMarkInternal(); if (ofMark < 0) { throw new IOException("not marked"); } // optimization: does the reset of the location occur within // the current buffer? 
BufferInput in = getIn(); int of = getOffset(); if (of > ofMark) { int cbRewind = of - ofMark; int ofCurrent = in.getOffset(); if (cbRewind < ofCurrent) { in.setOffset(ofCurrent - cbRewind); adjustOffsetInternal(-cbRewind); return; } } else if (of < ofMark) { int cbForward = ofMark - of; if (cbForward < in.available()) { in.skipBytes(cbForward); adjustOffsetInternal(cbForward); return; } } else { return; } super.reset(); sync(); } // ----- DataInput methods -------------------------------------- /** * {@inheritDoc} */ public int skipBytes(int cb) throws IOException { int cbActual; BufferInput in = m_in; if (in.available() >= cb) { cbActual = in.skipBytes(cb); assert cbActual == cb; adjustOffsetInternal(cbActual); } else { cbActual = super.skipBytes(cb); sync(); } return cbActual; } /** * {@inheritDoc} */ public byte readByte() throws IOException { byte b; BufferInput in = m_in; if (in.available() >= 1) { b = in.readByte(); adjustOffsetInternal(1); } else { b = super.readByte(); sync(); } return b; } /** * {@inheritDoc} */ public short readShort() throws IOException { short n; BufferInput in = m_in; if (in.available() >= 2) { n = in.readShort(); adjustOffsetInternal(2); } else { n = super.readShort(); sync(); } return n; } /** * {@inheritDoc} */ public int readUnsignedShort() throws IOException { int n; BufferInput in = m_in; if (in.available() >= 2) { n = in.readUnsignedShort(); adjustOffsetInternal(2); } else { n = super.readUnsignedShort(); sync(); } return n; } /** * {@inheritDoc} */ public char readChar() throws IOException { char ch; BufferInput in = m_in; if (in.available() >= 2) { ch = in.readChar(); adjustOffsetInternal(2); } else { ch = super.readChar(); sync(); } return ch; } /** * {@inheritDoc} */ public int readInt() throws IOException { int n; BufferInput in = getIn(); if (in.available() >= 4) { n = in.readInt(); adjustOffsetInternal(4); } else { n = super.readInt(); sync(); } return n; } /** * {@inheritDoc} */ public long readLong() throws IOException 
{ long l; BufferInput in = m_in; if (in.available() >= 8) { l = in.readLong(); adjustOffsetInternal(8); } else { l = super.readLong(); sync(); } return l; } /** * {@inheritDoc} */ public float readFloat() throws IOException { float fl; BufferInput in = m_in; if (in.available() >= 4) { fl = in.readFloat(); adjustOffsetInternal(4); } else { fl = super.readFloat(); sync(); } return fl; } /** * {@inheritDoc} */ public double readDouble() throws IOException { double dfl; BufferInput in = m_in; if (in.available() >= 8) { dfl = in.readDouble(); adjustOffsetInternal(8); } else { dfl = super.readDouble(); sync(); } return dfl; } /** * {@inheritDoc} */ public String readUTF() throws IOException { BufferInput in = m_in; int cbAvail = in.available(); int cbChars; if (cbAvail >= 2) { int ofBefore = in.getOffset(); cbChars = in.readUnsignedShort(); int cbTotal = 2 + cbChars; if (cbAvail >= cbTotal) { in.setOffset(ofBefore); String s = in.readUTF(); adjustOffsetInternal(cbTotal); return s; } else { // not enough bytes left to read the String, so update // the offset to reflect that we read the String length adjustOffsetInternal(2); } } else { cbChars = readUnsignedShort(); } // do a virtual read of the String itself (i.e. 
across a buffer // boundary) String s = readUTF(cbChars); sync(); return s; } // ----- BufferInput methods ------------------------------------ /** * {@inheritDoc} */ public String readSafeUTF() throws IOException { BufferInput in = m_in; int cbAvail = in.available(); int cbChars; if (cbAvail >= 5) { int ofBefore = in.getOffset(); cbChars = in.readPackedInt(); // WARNING: -1 == null String int cbLength = in.getOffset() - ofBefore; int cbTotal = cbLength + cbChars; if (cbChars > 0 && cbAvail >= cbTotal) { in.setOffset(ofBefore); String s = in.readSafeUTF(); adjustOffsetInternal(cbTotal); return s; } else { // not enough bytes left to read the String, so update // the offset to reflect that we read the String length adjustOffsetInternal(cbLength); } } else { cbChars = readPackedInt(); } // do a virtual read of the String itself (i.e. across a buffer // boundary) String s = readUTF(cbChars); if (cbChars > 0) { sync(); } return s; } /** * {@inheritDoc} */ public int readPackedInt() throws IOException { int n; BufferInput in = m_in; if (in.available() >= 5) { int of = in.getOffset(); n = in.readPackedInt(); adjustOffsetInternal(in.getOffset() - of); } else { n = super.readPackedInt(); sync(); } return n; } /** * {@inheritDoc} */ public long readPackedLong() throws IOException { long l; BufferInput in = m_in; if (in.available() >= 10) { int of = in.getOffset(); l = in.readPackedLong(); adjustOffsetInternal(in.getOffset() - of); } else { l = super.readPackedLong(); sync(); } return l; } /** * {@inheritDoc} */ public ReadBuffer readBuffer(int cb) throws IOException { ReadBuffer buf; BufferInput in = m_in; if (in.available() >= cb) { buf = in.readBuffer(cb); adjustOffsetInternal(cb); } else { buf = super.readBuffer(cb); sync(); } return buf; } /** * {@inheritDoc} */ public void setOffset(int of) { // optimization: is the offset within the current buffer? 
BufferInput in = getIn(); int ofCur = getOffset(); if (ofCur > of) { int cbRewind = ofCur - of; int ofCurrent = in.getOffset(); if (cbRewind < ofCurrent) { in.setOffset(ofCurrent - cbRewind); adjustOffsetInternal(-cbRewind); return; } } else if (ofCur < of) { int cbForward = of - ofCur; try { if (cbForward < in.available()) { in.skipBytes(cbForward); adjustOffsetInternal(cbForward); return; } } catch (IOException e) { throw ensureRuntimeException(e); } } else { return; } super.setOffset(of); sync(); } // ----- internal ----------------------------------------------- /** * Obtain the underlying BufferOutput. * * @return the underlying BufferOutput */ protected BufferInput getIn() { return m_in; } /** * After traversing an underlying WriteBuffer boundary, or otherwise * changing the offset significantly, sync between this BufferOutput's * absolute position and an underlying BufferOutput's relative * position. */ protected void sync() { MultiBufferReadBuffer bufMulti = MultiBufferReadBuffer.this; // absolute offset of this BufferInput int of = getOffset(); // find the underlying WriteBuffer for that offset int iBuf = bufMulti.getBufferIndexByOffset(of); ReadBuffer buf = bufMulti.getBuffer(iBuf); // convert the absolute offset to the underlying buffer's // relative offset of -= bufMulti.getBufferOffset(iBuf); BufferInput inPrev = m_in; if (inPrev != null && buf == inPrev.getBuffer()) { // still inside the previous underlying ReadBuffer inPrev.setOffset(of); } else { // traversed to the next (or some subsequent) underlying // ReadBuffer; if this buffer supports destructive streaming, // then release any previously streamed sub-buffers if (m_fDestructive) { int ofMark = getMarkInternal(); if (ofMark >= 0) { // mark is in place; only allow destruction before // the buffer containing the mark iBuf = Math.min(iBuf, bufMulti.getBufferIndexByOffset(ofMark) - 1); } // release previous buffers while (--iBuf >= 0 && bufMulti.releaseBuffer(iBuf) != null) { } } // store the new 
underlying BufferInput and adjust the offset BufferInput in = buf.getBufferInput(); m_in = in; in.setOffset(of); } } // ----- data members ------------------------------------------- /** * The current underlying BufferInput object. */ private BufferInput m_in; /** * True if the BufferInput set to self-destruct. */ protected boolean m_fDestructive; } // ----- internal ------------------------------------------------------- /** * Determine the number of ReadBuffer objects that contain the data * presented by this MultiBufferReadBuffer. * * @return the count of underlying ReadBuffer objects */ protected int getBufferCount() { return m_abuf.length; } /** * Determine the offset of the specified buffer. The offset of a buffer * is the absolute offset of the first byte stored in the buffer. * * @param iBuffer an index <tt>0 &lt;= iBuffer &lt; getBufferCount()</tt> * * @return the absolute offset of the first byte of the specified * ReadBuffer */ protected int getBufferOffset(int iBuffer) { return m_aofBuffer[iBuffer] - m_ofStart; } /** * Obtain the specified buffer. * * @param iBuffer an index <tt>0 &lt;= iBuffer &lt; getBufferCount()</tt> * * @return the specified ReadBuffer */ protected ReadBuffer getBuffer(int iBuffer) { ReadBuffer buf = m_abuf[iBuffer]; if (buf == null) { throw new IndexOutOfBoundsException( "the requested buffer '" + iBuffer + "' has been released"); } return buf; } /** * Release the specified buffer. * * Once released any operation requiring access to overall buffer segment * maintained by said buffer will result in an error. This method allows * for "destructive streaming", see #getDestructiveBufferInput() * * @param iBuffer an index <tt>0 &lt;= iBuffer &lt; getBufferCount()</tt> * * @return the released buffer */ protected ReadBuffer releaseBuffer(int iBuffer) { ReadBuffer[] abuf = m_abuf; ReadBuffer buf = m_abuf[iBuffer]; abuf[iBuffer] = null; return buf; } /** * Determine which underlying ReadBuffer contains the specified offset. 
* * @param of an offset into this MultiBufferReadBuffer * * @return the index of the ReadBuffer containing the specified offset */ protected int getBufferIndexByOffset(int of) { int[] aof = m_aofBuffer; int cBuffers = aof.length; if (cBuffers == 1) { // since there is only one buffer, the offset occurs within it return 0; } // adjust offset to create an absolute offset into the virtual // ReadBuffer composed of all the underlying ReadBuffer objects of += m_ofStart; // optimization: use previous "cached" result, and check both that // buffer and the buffer after it (assuming there is buffer by // buffer forward progress) int iBuf = 0; // "closest" node from the binary search int iLow = 0; // "left-most" node for the binary search boolean fFound = false; if (of >= m_ofLastOffset) { for (iBuf = m_iBufLastAnswer, iLow = iBuf + 2; iBuf < iLow; ++iBuf) { if (iBuf + 1 >= cBuffers || of < aof[iBuf+1]) { fFound = true; break; } } } if (!fFound) { // brute-force binary search through the array of offsets int iHigh = cBuffers - 1; while (iLow <= iHigh) { // pick a buffer to act as the root of the tree (or sub-tree) // that is being searched int iRoot = (iLow + iHigh) >> 1; // absolute offset of the first byte of the buffer int ofRoot = aof[iRoot]; if (of == ofRoot) { // exact hit iBuf = iRoot; while (iBuf < iHigh && ofRoot == aof[iBuf + 1]) { // COH-5507 : skip over any empty buffers iBuf += 1; } break; } else if (of < ofRoot) { iHigh = iRoot - 1; } else // if (of > ofRoot) { // go "right" in the binary tree .. iLow = iRoot + 1; // .. but remember this is the closest we've come so far .. iBuf = iRoot; } } } // update "cache" m_ofLastOffset = of; m_iBufLastAnswer = iBuf; return iBuf; } // ----- data members --------------------------------------------------- /** * The array of all ReadBuffer objects allocated to store the contents * of this MultiBufferReadBuffer. 
*/ private final ReadBuffer[] m_abuf; /** * An array of absolute offsets, each corresponding to the first byte * stored in the corresponding ReadBuffer object. */ private final int[] m_aofBuffer; /** * The starting offset of this ReadBuffer. Basically, if there were a * virtual ReadBuffer composed of all the contents of all the underlying * ReadBuffers in {@link #m_abuf}, then this is the offset into that * virtual ReadBuffer. */ private final int m_ofStart; /** * The ending offset of this ReadBuffer. Basically, if there were a * virtual ReadBuffer composed of all the contents of all the underlying * ReadBuffers in {@link #m_abuf}, then this is the offset into that * ReadBuffer of the first byte that this MultiBufferReadBuffer does not * permit access to; i.e. it is the "exclusive" ending offset. */ private final int m_ofEnd; /** * Cached "last offset looked up" value. */ private transient int m_ofLastOffset; /** * Cached "last buffer index answer" value. */ private transient int m_iBufLastAnswer; }
apache/flink
35,356
flink-python/src/main/java/org/apache/flink/table/runtime/typeutils/PythonTypeUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.runtime.typeutils; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.common.typeutils.base.BooleanSerializer; import org.apache.flink.api.common.typeutils.base.ByteSerializer; import org.apache.flink.api.common.typeutils.base.DoubleSerializer; import org.apache.flink.api.common.typeutils.base.FloatSerializer; import org.apache.flink.api.common.typeutils.base.IntSerializer; import org.apache.flink.api.common.typeutils.base.LongSerializer; import org.apache.flink.api.common.typeutils.base.ShortSerializer; import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer; import org.apache.flink.fnexecution.v1.FlinkFnApi; import org.apache.flink.table.data.ArrayData; import org.apache.flink.table.data.DecimalData; import org.apache.flink.table.data.MapData; import org.apache.flink.table.data.RowData; import org.apache.flink.table.data.util.DataFormatConverters; import org.apache.flink.table.runtime.typeutils.serializers.python.ArrayDataSerializer; import org.apache.flink.table.runtime.typeutils.serializers.python.DecimalDataSerializer; import 
org.apache.flink.table.runtime.typeutils.serializers.python.MapDataSerializer; import org.apache.flink.table.runtime.typeutils.serializers.python.RowDataSerializer; import org.apache.flink.table.types.logical.ArrayType; import org.apache.flink.table.types.logical.BigIntType; import org.apache.flink.table.types.logical.BinaryType; import org.apache.flink.table.types.logical.BooleanType; import org.apache.flink.table.types.logical.CharType; import org.apache.flink.table.types.logical.DateType; import org.apache.flink.table.types.logical.DecimalType; import org.apache.flink.table.types.logical.DoubleType; import org.apache.flink.table.types.logical.FloatType; import org.apache.flink.table.types.logical.IntType; import org.apache.flink.table.types.logical.LegacyTypeInformationType; import org.apache.flink.table.types.logical.LocalZonedTimestampType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.MapType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.table.types.logical.SmallIntType; import org.apache.flink.table.types.logical.TimeType; import org.apache.flink.table.types.logical.TimestampType; import org.apache.flink.table.types.logical.TinyIntType; import org.apache.flink.table.types.logical.VarBinaryType; import org.apache.flink.table.types.logical.VarCharType; import org.apache.flink.table.types.logical.utils.LogicalTypeDefaultVisitor; import org.apache.flink.table.types.utils.TypeConversions; import org.apache.flink.types.Row; import org.apache.flink.types.RowKind; import java.io.Serializable; import java.lang.reflect.Array; import java.math.BigDecimal; import java.math.RoundingMode; import java.sql.Time; import java.util.HashMap; import java.util.Map; /** * Utilities for converting Flink logical types, such as convert it to the related TypeSerializer or * ProtoType. 
*/ @Internal public final class PythonTypeUtils { private static final String EMPTY_STRING = ""; public static FlinkFnApi.Schema.FieldType toProtoType(LogicalType logicalType) { return logicalType.accept(new PythonTypeUtils.LogicalTypeToProtoTypeConverter()); } public static TypeSerializer toInternalSerializer(LogicalType logicalType) { return logicalType.accept(new LogicalTypetoInternalSerializerConverter()); } public static DataConverter toDataConverter(LogicalType logicalType) { return logicalType.accept(new LogicalTypeToDataConverter()); } /** * Convert the specified bigDecimal according to the specified precision and scale. The * specified bigDecimal may be rounded to have the specified scale and then the specified * precision is checked. If precision overflow, it will return `null`. * * <p>Note: The implementation refers to {@link DecimalData#fromBigDecimal}. */ public static BigDecimal fromBigDecimal(BigDecimal bigDecimal, int precision, int scale) { if (bigDecimal.scale() != scale || bigDecimal.precision() > precision) { // need adjust the precision and scale bigDecimal = bigDecimal.setScale(scale, RoundingMode.HALF_UP); if (bigDecimal.precision() > precision) { return null; } } return bigDecimal; } private static class LogicalTypetoInternalSerializerConverter extends LogicalTypeDefaultVisitor<TypeSerializer> { @Override public TypeSerializer visit(BooleanType booleanType) { return BooleanSerializer.INSTANCE; } @Override public TypeSerializer visit(TinyIntType tinyIntType) { return ByteSerializer.INSTANCE; } @Override public TypeSerializer visit(SmallIntType smallIntType) { return ShortSerializer.INSTANCE; } @Override public TypeSerializer visit(IntType intType) { return IntSerializer.INSTANCE; } @Override public TypeSerializer visit(BigIntType bigIntType) { return LongSerializer.INSTANCE; } @Override public TypeSerializer visit(FloatType floatType) { return FloatSerializer.INSTANCE; } @Override public TypeSerializer visit(DoubleType doubleType) { return 
DoubleSerializer.INSTANCE; } @Override public TypeSerializer visit(BinaryType binaryType) { return BytePrimitiveArraySerializer.INSTANCE; } @Override public TypeSerializer visit(VarBinaryType varBinaryType) { return BytePrimitiveArraySerializer.INSTANCE; } @Override public TypeSerializer visit(RowType rowType) { final TypeSerializer[] fieldTypeSerializers = rowType.getFields().stream() .map(f -> f.getType().accept(this)) .toArray(TypeSerializer[]::new); return new RowDataSerializer( rowType.getChildren().toArray(new LogicalType[0]), fieldTypeSerializers); } @Override public TypeSerializer visit(VarCharType varCharType) { return StringDataSerializer.INSTANCE; } @Override public TypeSerializer visit(CharType charType) { return StringDataSerializer.INSTANCE; } @Override public TypeSerializer visit(DateType dateType) { return IntSerializer.INSTANCE; } @Override public TypeSerializer visit(TimeType timeType) { return IntSerializer.INSTANCE; } @Override public TypeSerializer visit(TimestampType timestampType) { return new TimestampDataSerializer(timestampType.getPrecision()); } @Override public TypeSerializer visit(LocalZonedTimestampType localZonedTimestampType) { return new TimestampDataSerializer(localZonedTimestampType.getPrecision()); } public TypeSerializer visit(ArrayType arrayType) { LogicalType elementType = arrayType.getElementType(); TypeSerializer elementTypeSerializer = elementType.accept(this); return new ArrayDataSerializer(elementType, elementTypeSerializer); } @Override public TypeSerializer visit(MapType mapType) { LogicalType keyType = mapType.getKeyType(); LogicalType valueType = mapType.getValueType(); TypeSerializer<?> keyTypeSerializer = keyType.accept(this); TypeSerializer<?> valueTypeSerializer = valueType.accept(this); return new MapDataSerializer( keyType, valueType, keyTypeSerializer, valueTypeSerializer); } @Override public TypeSerializer visit(DecimalType decimalType) { return new DecimalDataSerializer(decimalType.getPrecision(), 
decimalType.getScale()); } @Override protected TypeSerializer defaultMethod(LogicalType logicalType) { throw new UnsupportedOperationException( String.format( "Python UDF doesn't support logical type %s currently.", logicalType.asSummaryString())); } } /** Converter That convert the logicalType to the related Prototype. */ public static class LogicalTypeToProtoTypeConverter extends LogicalTypeDefaultVisitor<FlinkFnApi.Schema.FieldType> { @Override public FlinkFnApi.Schema.FieldType visit(BooleanType booleanType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.BOOLEAN) .setNullable(booleanType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(TinyIntType tinyIntType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.TINYINT) .setNullable(tinyIntType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(SmallIntType smallIntType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.SMALLINT) .setNullable(smallIntType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(IntType intType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.INT) .setNullable(intType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(BigIntType bigIntType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.BIGINT) .setNullable(bigIntType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(FloatType floatType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.FLOAT) .setNullable(floatType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(DoubleType doubleType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.DOUBLE) .setNullable(doubleType.isNullable()) .build(); } 
@Override public FlinkFnApi.Schema.FieldType visit(BinaryType binaryType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.BINARY) .setBinaryInfo( FlinkFnApi.Schema.BinaryInfo.newBuilder() .setLength(binaryType.getLength())) .setNullable(binaryType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(VarBinaryType varBinaryType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.VARBINARY) .setVarBinaryInfo( FlinkFnApi.Schema.VarBinaryInfo.newBuilder() .setLength(varBinaryType.getLength())) .setNullable(varBinaryType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(CharType charType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.CHAR) .setCharInfo( FlinkFnApi.Schema.CharInfo.newBuilder().setLength(charType.getLength())) .setNullable(charType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(VarCharType varCharType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.VARCHAR) .setVarCharInfo( FlinkFnApi.Schema.VarCharInfo.newBuilder() .setLength(varCharType.getLength())) .setNullable(varCharType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(DateType dateType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.DATE) .setNullable(dateType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(TimeType timeType) { return FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.TIME) .setTimeInfo( FlinkFnApi.Schema.TimeInfo.newBuilder() .setPrecision(timeType.getPrecision())) .setNullable(timeType.isNullable()) .build(); } @Override public FlinkFnApi.Schema.FieldType visit(TimestampType timestampType) { FlinkFnApi.Schema.FieldType.Builder builder = FlinkFnApi.Schema.FieldType.newBuilder() 
.setTypeName(FlinkFnApi.Schema.TypeName.TIMESTAMP) .setNullable(timestampType.isNullable()); FlinkFnApi.Schema.TimestampInfo.Builder timestampInfoBuilder = FlinkFnApi.Schema.TimestampInfo.newBuilder() .setPrecision(timestampType.getPrecision()); builder.setTimestampInfo(timestampInfoBuilder); return builder.build(); } @Override public FlinkFnApi.Schema.FieldType visit(LocalZonedTimestampType localZonedTimestampType) { FlinkFnApi.Schema.FieldType.Builder builder = FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.LOCAL_ZONED_TIMESTAMP) .setNullable(localZonedTimestampType.isNullable()); FlinkFnApi.Schema.LocalZonedTimestampInfo.Builder dateTimeBuilder = FlinkFnApi.Schema.LocalZonedTimestampInfo.newBuilder() .setPrecision(localZonedTimestampType.getPrecision()); builder.setLocalZonedTimestampInfo(dateTimeBuilder.build()); return builder.build(); } @Override public FlinkFnApi.Schema.FieldType visit(DecimalType decimalType) { FlinkFnApi.Schema.FieldType.Builder builder = FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.DECIMAL) .setNullable(decimalType.isNullable()); FlinkFnApi.Schema.DecimalInfo.Builder decimalInfoBuilder = FlinkFnApi.Schema.DecimalInfo.newBuilder() .setPrecision(decimalType.getPrecision()) .setScale(decimalType.getScale()); builder.setDecimalInfo(decimalInfoBuilder); return builder.build(); } @Override public FlinkFnApi.Schema.FieldType visit(ArrayType arrayType) { FlinkFnApi.Schema.FieldType.Builder builder = FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.BASIC_ARRAY) .setNullable(arrayType.isNullable()); FlinkFnApi.Schema.FieldType elementFieldType = arrayType.getElementType().accept(this); builder.setCollectionElementType(elementFieldType); return builder.build(); } @Override public FlinkFnApi.Schema.FieldType visit(MapType mapType) { FlinkFnApi.Schema.FieldType.Builder builder = FlinkFnApi.Schema.FieldType.newBuilder() 
.setTypeName(FlinkFnApi.Schema.TypeName.MAP) .setNullable(mapType.isNullable()); FlinkFnApi.Schema.MapInfo.Builder mapBuilder = FlinkFnApi.Schema.MapInfo.newBuilder() .setKeyType(mapType.getKeyType().accept(this)) .setValueType(mapType.getValueType().accept(this)); builder.setMapInfo(mapBuilder.build()); return builder.build(); } @Override public FlinkFnApi.Schema.FieldType visit(RowType rowType) { FlinkFnApi.Schema.FieldType.Builder builder = FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.ROW) .setNullable(rowType.isNullable()); FlinkFnApi.Schema.Builder schemaBuilder = FlinkFnApi.Schema.newBuilder(); for (RowType.RowField field : rowType.getFields()) { schemaBuilder.addFields( FlinkFnApi.Schema.Field.newBuilder() .setName(field.getName()) .setDescription(field.getDescription().orElse(EMPTY_STRING)) .setType(field.getType().accept(this)) .build()); } builder.setRowSchema(schemaBuilder.build()); return builder.build(); } @Override protected FlinkFnApi.Schema.FieldType defaultMethod(LogicalType logicalType) { if (logicalType instanceof LegacyTypeInformationType) { Class<?> typeClass = ((LegacyTypeInformationType) logicalType) .getTypeInformation() .getTypeClass(); if (typeClass == BigDecimal.class) { FlinkFnApi.Schema.FieldType.Builder builder = FlinkFnApi.Schema.FieldType.newBuilder() .setTypeName(FlinkFnApi.Schema.TypeName.DECIMAL) .setNullable(logicalType.isNullable()); // Because we can't get precision and scale from legacy BIG_DEC_TYPE_INFO, // we set the precision and scale to default value compatible with python. 
FlinkFnApi.Schema.DecimalInfo.Builder decimalTypeBuilder = FlinkFnApi.Schema.DecimalInfo.newBuilder() .setPrecision(38) .setScale(18); builder.setDecimalInfo(decimalTypeBuilder); return builder.build(); } } throw new UnsupportedOperationException( String.format( "Python UDF doesn't support logical type %s currently.", logicalType.asSummaryString())); } } /** Data Converter that converts the data to the java format data which can be used in PemJa. */ public abstract static class DataConverter<IN, INTER, OUT> implements Serializable { private static final long serialVersionUID = 1L; private final DataFormatConverters.DataFormatConverter<IN, INTER> dataFormatConverter; public DataConverter( DataFormatConverters.DataFormatConverter<IN, INTER> dataFormatConverter) { this.dataFormatConverter = dataFormatConverter; } public final IN toInternal(OUT value) { return dataFormatConverter.toInternal(toInternalImpl(value)); } public final OUT toExternal(RowData row, int column) { return toExternalImpl(dataFormatConverter.toExternal(row, column)); } abstract INTER toInternalImpl(OUT value); abstract OUT toExternalImpl(INTER value); } /** Identity data converter. */ public static final class IdentityDataConverter<IN, OUT> extends DataConverter<IN, OUT, OUT> { IdentityDataConverter( DataFormatConverters.DataFormatConverter<IN, OUT> dataFormatConverter) { super(dataFormatConverter); } @Override OUT toInternalImpl(OUT value) { return value; } @Override OUT toExternalImpl(OUT value) { return value; } } /** * Python Long will be converted to Long in PemJa, so we need ByteDataConverter to convert Java * Long to internal Byte. 
*/ public static final class ByteDataConverter extends DataConverter<Byte, Byte, Long> { public static final ByteDataConverter INSTANCE = new ByteDataConverter(); private ByteDataConverter() { super(DataFormatConverters.ByteConverter.INSTANCE); } @Override Byte toInternalImpl(Long value) { return value.byteValue(); } @Override Long toExternalImpl(Byte value) { return value.longValue(); } } /** * Python Long will be converted to Long in PemJa, so we need ShortDataConverter to convert Java * Long to internal Short. */ public static final class ShortDataConverter extends DataConverter<Short, Short, Long> { public static final ShortDataConverter INSTANCE = new ShortDataConverter(); private ShortDataConverter() { super(DataFormatConverters.ShortConverter.INSTANCE); } @Override Short toInternalImpl(Long value) { return value.shortValue(); } @Override Long toExternalImpl(Short value) { return value.longValue(); } } /** * Python Long will be converted to Long in PemJa, so we need IntDataConverter to convert Java * Long to internal Integer. */ public static final class IntDataConverter extends DataConverter<Integer, Integer, Long> { public static final IntDataConverter INSTANCE = new IntDataConverter(); private IntDataConverter() { super(DataFormatConverters.IntConverter.INSTANCE); } @Override Integer toInternalImpl(Long value) { return value.intValue(); } @Override Long toExternalImpl(Integer value) { return value.longValue(); } } /** * Python Float will be converted to Double in PemJa, so we need FloatDataConverter to convert * Java Double to internal Float. 
*/ public static final class FloatDataConverter extends DataConverter<Float, Float, Double> { public static final FloatDataConverter INSTANCE = new FloatDataConverter(); private FloatDataConverter() { super(DataFormatConverters.FloatConverter.INSTANCE); } @Override Float toInternalImpl(Double value) { return value.floatValue(); } @Override Double toExternalImpl(Float value) { return value.doubleValue(); } } /** * Python datetime.time will be converted to Time in PemJa, so we need TimeDataConverter to * convert Java Double to internal Integer. */ public static final class TimeDataConverter extends DataConverter<Integer, Integer, Time> { public static final TimeDataConverter INSTANCE = new TimeDataConverter(); private TimeDataConverter() { super(DataFormatConverters.IntConverter.INSTANCE); } @Override Integer toInternalImpl(Time value) { return (int) value.getTime(); } @Override Time toExternalImpl(Integer value) { return new Time(value); } } /** * RowData will be converted to the Object Array [RowKind(as Long Object), Field Values(as * Object Array)]. 
*/ public static final class RowDataConverter extends DataConverter<RowData, Row, Object[]> { private final DataConverter[] fieldDataConverters; private final Row reuseRow; private final Object[] reuseExternalRow; private final Object[] reuseExternalRowData; RowDataConverter( DataConverter[] fieldDataConverters, DataFormatConverters.DataFormatConverter<RowData, Row> dataFormatConverter) { super(dataFormatConverter); this.fieldDataConverters = fieldDataConverters; this.reuseRow = new Row(fieldDataConverters.length); this.reuseExternalRowData = new Object[fieldDataConverters.length]; this.reuseExternalRow = new Object[2]; this.reuseExternalRow[1] = reuseExternalRowData; } @SuppressWarnings("unchecked") @Override Row toInternalImpl(Object[] value) { RowKind rowKind = RowKind.fromByteValue(((Long) value[0]).byteValue()); reuseRow.setKind(rowKind); Object[] fieldValues = (Object[]) value[1]; for (int i = 0; i < fieldValues.length; i++) { reuseRow.setField(i, fieldDataConverters[i].toInternalImpl(fieldValues[i])); } return reuseRow; } @SuppressWarnings("unchecked") @Override Object[] toExternalImpl(Row value) { reuseExternalRow[0] = (long) value.getKind().toByteValue(); for (int i = 0; i < value.getArity(); i++) { reuseExternalRowData[i] = fieldDataConverters[i].toExternalImpl(value.getField(i)); } return reuseExternalRow; } } /** * The element in the Object Array will be converted to the corresponding Data through element * DataConverter. 
*/ public static final class ArrayDataConverter<T> extends DataConverter<ArrayData, T[], Object[]> { private final DataConverter elementConverter; private final Class<T> componentClass; ArrayDataConverter( Class<T> componentClass, DataConverter elementConverter, DataFormatConverters.DataFormatConverter<ArrayData, T[]> dataFormatConverter) { super(dataFormatConverter); this.componentClass = componentClass; this.elementConverter = elementConverter; } @SuppressWarnings("unchecked") @Override T[] toInternalImpl(Object[] value) { T[] array = (T[]) Array.newInstance(componentClass, value.length); for (int i = 0; i < value.length; i++) { array[i] = (T) elementConverter.toInternalImpl(value[i]); } return array; } @SuppressWarnings("unchecked") @Override Object[] toExternalImpl(T[] value) { Object[] array = new Object[value.length]; for (int i = 0; i < value.length; i++) { array[i] = elementConverter.toExternalImpl(value[i]); } return array; } } /** * The key/value in the Map will be converted to the corresponding Data through key/value * DataConverter. 
*/ public static final class MapDataConverter extends DataConverter<MapData, Map<?, ?>, Map<?, ?>> { private final DataConverter keyConverter; private final DataConverter valueConverter; MapDataConverter( DataConverter keyConverter, DataConverter valueConverter, DataFormatConverters.DataFormatConverter<MapData, Map<?, ?>> dataFormatConverter) { super(dataFormatConverter); this.keyConverter = keyConverter; this.valueConverter = valueConverter; } @SuppressWarnings("unchecked") @Override Map toInternalImpl(Map<?, ?> value) { Map<Object, Object> map = new HashMap<>(); for (Map.Entry<?, ?> entry : value.entrySet()) { map.put( keyConverter.toInternalImpl(entry.getKey()), valueConverter.toInternalImpl(entry.getValue())); } return map; } @SuppressWarnings("unchecked") @Override Map<?, ?> toExternalImpl(Map<?, ?> value) { Map<Object, Object> map = new HashMap<>(); for (Map.Entry<?, ?> entry : value.entrySet()) { map.put( keyConverter.toExternalImpl(entry.getKey()), valueConverter.toExternalImpl(entry.getValue())); } return map; } } private static final class LogicalTypeToDataConverter extends LogicalTypeDefaultVisitor<DataConverter> { @Override public DataConverter visit(BooleanType booleanType) { return defaultConverter(booleanType); } @Override public DataConverter visit(TinyIntType tinyIntType) { return ByteDataConverter.INSTANCE; } @Override public DataConverter visit(SmallIntType smallIntType) { return ShortDataConverter.INSTANCE; } @Override public DataConverter visit(IntType intType) { return IntDataConverter.INSTANCE; } @Override public DataConverter visit(BigIntType bigIntType) { return defaultConverter(bigIntType); } @Override public DataConverter visit(FloatType floatType) { return FloatDataConverter.INSTANCE; } @Override public DataConverter visit(DoubleType doubleType) { return defaultConverter(doubleType); } @Override public DataConverter visit(DecimalType decimalType) { return defaultConverter(decimalType); } @Override public DataConverter visit(VarCharType 
varCharType) { return defaultConverter(varCharType); } @Override public DataConverter visit(CharType charType) { return defaultConverter(charType); } @Override public DataConverter visit(VarBinaryType varBinaryType) { return defaultConverter(varBinaryType); } @Override public DataConverter visit(BinaryType binaryType) { return defaultConverter(binaryType); } @Override public DataConverter visit(DateType dateType) { return new IdentityDataConverter<>(DataFormatConverters.DateConverter.INSTANCE); } @Override public DataConverter visit(TimeType timeType) { return TimeDataConverter.INSTANCE; } @Override public DataConverter visit(TimestampType timestampType) { return new IdentityDataConverter<>( new DataFormatConverters.TimestampConverter(timestampType.getPrecision())); } @Override public DataConverter visit(LocalZonedTimestampType localZonedTimestampType) { return new IdentityDataConverter<>( new DataFormatConverters.TimestampConverter( localZonedTimestampType.getPrecision())); } @SuppressWarnings("unchecked") @Override public DataConverter visit(ArrayType arrayType) { LogicalType elementType = arrayType.getElementType(); DataConverter elementDataConverter = elementType.accept(this); return new ArrayDataConverter( TypeConversions.fromLogicalToDataType(elementType).getConversionClass(), elementDataConverter, DataFormatConverters.getConverterForDataType( TypeConversions.fromLogicalToDataType(arrayType))); } @SuppressWarnings("unchecked") @Override public DataConverter visit(MapType mapType) { LogicalType keyType = mapType.getKeyType(); LogicalType valueType = mapType.getValueType(); DataConverter keyTypeDataConverter = keyType.accept(this); DataConverter valueTyDataConverter = valueType.accept(this); return new MapDataConverter( keyTypeDataConverter, valueTyDataConverter, DataFormatConverters.getConverterForDataType( TypeConversions.fromLogicalToDataType(mapType))); } @SuppressWarnings("unchecked") @Override public DataConverter visit(RowType rowType) { final 
DataConverter[] fieldDataConverters = rowType.getFields().stream() .map(f -> f.getType().accept(this)) .toArray(DataConverter[]::new); return new RowDataConverter( fieldDataConverters, DataFormatConverters.getConverterForDataType( TypeConversions.fromLogicalToDataType(rowType))); } @Override protected DataConverter defaultMethod(LogicalType logicalType) { throw new UnsupportedOperationException( String.format( "Currently, Python UDF doesn't support logical type %s in Thread Mode.", logicalType.asSummaryString())); } @SuppressWarnings("unchecked") private DataConverter defaultConverter(LogicalType logicalType) { return new IdentityDataConverter<>( DataFormatConverters.getConverterForDataType( TypeConversions.fromLogicalToDataType(logicalType))); } } }
apache/tomee
35,287
container/openejb-core/src/main/java/org/apache/openejb/core/mdb/MdbContainer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openejb.core.mdb; import org.apache.openejb.ApplicationException; import org.apache.openejb.BeanContext; import org.apache.openejb.ContainerType; import org.apache.openejb.InterfaceType; import org.apache.openejb.OpenEJBException; import org.apache.openejb.RpcContainer; import org.apache.openejb.SystemException; import org.apache.openejb.core.ExceptionType; import org.apache.openejb.core.Operation; import org.apache.openejb.core.ThreadContext; import org.apache.openejb.core.interceptor.InterceptorData; import org.apache.openejb.core.interceptor.InterceptorStack; import org.apache.openejb.core.timer.EjbTimerService; import org.apache.openejb.core.transaction.TransactionPolicy; import org.apache.openejb.loader.Options; import org.apache.openejb.loader.SystemInstance; import org.apache.openejb.monitoring.*; import org.apache.openejb.resource.XAResourceWrapper; import org.apache.openejb.spi.SecurityService; import org.apache.openejb.util.LogCategory; import org.apache.openejb.util.Logger; import org.apache.openejb.util.StringTemplate; import org.apache.xbean.recipe.ObjectRecipe; import org.apache.xbean.recipe.Option; import javax.management.Attribute; import 
javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanConstructorInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.MBeanNotificationInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanParameterInfo;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.naming.NamingException;
import jakarta.resource.ResourceException;
import jakarta.resource.spi.ActivationSpec;
import jakarta.resource.spi.ResourceAdapter;
import jakarta.resource.spi.UnavailableException;
import javax.transaction.xa.XAResource;
import jakarta.validation.ConstraintViolationException;
import jakarta.validation.Validator;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;

import static javax.management.MBeanOperationInfo.ACTION;
import static org.apache.openejb.core.transaction.EjbTransactionUtil.afterInvoke;
import static org.apache.openejb.core.transaction.EjbTransactionUtil.createTransactionPolicy;
import static org.apache.openejb.core.transaction.EjbTransactionUtil.handleApplicationException;
import static org.apache.openejb.core.transaction.EjbTransactionUtil.handleSystemException;

/**
 * Container for message-driven beans (MDBs).
 *
 * <p>Responsibilities visible in this class:
 * <ul>
 *   <li>{@link #deploy(BeanContext)} / {@link #undeploy(BeanContext)} — builds an
 *       {@link ActivationSpec} from the bean's activation properties, registers JMX
 *       statistics and control MBeans, and activates/deactivates the message endpoint
 *       on the configured {@link ResourceAdapter}.</li>
 *   <li>{@link #beforeDelivery}, {@link #invoke(Object, Method, InterfaceType, Object...)}
 *       and {@link #afterDelivery} — the JCA message-endpoint delivery protocol, with
 *       transaction begin/enlist/complete handled per invocation.</li>
 * </ul>
 *
 * <p>Thread-safety: per-deployment state lives in {@link ConcurrentHashMap}s; the
 * per-delivery state is carried on the calling thread via {@link ThreadContext}.
 */
public class MdbContainer implements RpcContainer, BaseMdbContainer {
    private static final Logger logger = Logger.getInstance(LogCategory.OPENEJB, "org.apache.openejb.util.resources");

    // BeanContext being deployed/undeployed on the current thread; exposed via current()
    // so collaborators (e.g. the resource adapter) can see which MDB is being (de)activated.
    private static final ThreadLocal<BeanContext> CURRENT = new ThreadLocal<>();
    private static final Object[] NO_ARGS = new Object[0];

    // JMX control MBean name registered per deployment (see addJMxControl / undeploy).
    private final Map<BeanContext, ObjectName> mbeanNames = new ConcurrentHashMap<>();
    // Activation state per deployment; consulted on undeploy and by the JMX control.
    private final Map<BeanContext, MdbActivationContext> activationContexts = new ConcurrentHashMap<>();
    private final Object containerID;
    private final SecurityService securityService;
    private final ResourceAdapter resourceAdapter;
    // Raw Class on purpose: the public getters return raw Class, so callers rely on it.
    private final Class messageListenerInterface;
    private final Class activationSpecClass;
    private final int instanceLimit;
    // When true, an activation property with no matching setter is a hard error.
    private final boolean failOnUnknownActivationSpec;
    private final ConcurrentMap<Object, BeanContext> deployments = new ConcurrentHashMap<>();
    private final XAResourceWrapper xaResourceWrapper;
    private final InboundRecovery inboundRecovery;
    private final Properties properties = new Properties();

    public MdbContainer(final Object containerID, final SecurityService securityService, final ResourceAdapter resourceAdapter,
                        final Class messageListenerInterface, final Class activationSpecClass, final int instanceLimit,
                        final boolean failOnUnknownActivationSpec) {
        this.containerID = containerID;
        this.securityService = securityService;
        this.resourceAdapter = resourceAdapter;
        this.messageListenerInterface = messageListenerInterface;
        this.activationSpecClass = activationSpecClass;
        this.instanceLimit = instanceLimit;
        this.failOnUnknownActivationSpec = failOnUnknownActivationSpec;
        // Optional components: either may be absent (null) in a given server assembly.
        xaResourceWrapper = SystemInstance.get().getComponent(XAResourceWrapper.class);
        inboundRecovery = SystemInstance.get().getComponent(InboundRecovery.class);
    }

    public BeanContext[] getBeanContexts() {
        return deployments.values().toArray(new BeanContext[deployments.size()]);
    }

    public BeanContext getBeanContext(final Object deploymentID) {
        return deployments.get(deploymentID);
    }

    public ContainerType getContainerType() {
        return ContainerType.MESSAGE_DRIVEN;
    }

    public Object getContainerID() {
        return containerID;
    }

    public ResourceAdapter getResourceAdapter() {
        return resourceAdapter;
    }

    public Class getMessageListenerInterface() {
        return messageListenerInterface;
    }

    public Class getActivationSpecClass() {
        return activationSpecClass;
    }

    public Properties getProperties() {
        return properties;
    }

    /**
     * Deploys an MDB: builds the activation spec, registers stats/instance MBeans,
     * and (unless disabled via the {@code MdbActiveOnStartup}/{@code DeliveryActive}
     * activation properties) activates the endpoint on the resource adapter.
     *
     * @throws OpenEJBException if the bean's listener interface does not match this
     *                          container, or endpoint activation fails
     */
    public void deploy(final BeanContext beanContext) throws OpenEJBException {
        final Object deploymentId = beanContext.getDeploymentID();
        if (!beanContext.getMdbInterface().equals(messageListenerInterface)) {
            throw new OpenEJBException("Deployment '" + deploymentId + "' has message listener interface " +
                beanContext.getMdbInterface().getName() + " but this MDB container only supports " +
                messageListenerInterface);
        }

        // create the activation spec
        final ActivationSpec activationSpec = createActivationSpec(beanContext);

        if (inboundRecovery != null) {
            inboundRecovery.recover(resourceAdapter, activationSpec, containerID.toString());
        }

        final Options options = new Options(beanContext.getProperties());
        // Per-bean InstanceLimit overrides the container-wide default.
        final int instanceLimit = options.get("InstanceLimit", this.instanceLimit);
        // create the message endpoint
        final MdbInstanceFactory instanceFactory = new MdbInstanceFactory(beanContext, securityService, instanceLimit);
        final EndpointFactory endpointFactory = new EndpointFactory(activationSpec, this, beanContext, instanceFactory, null, xaResourceWrapper, false);

        // update the data structures
        // this must be done before activating the endpoint since the ra may immedately begin delivering messages
        beanContext.setContainer(this);
        beanContext.setContainerData(endpointFactory);
        deployments.put(deploymentId, beanContext);

        final MBeanServer server = LocalMBeanServer.get();

        // Create stats interceptor
        if (StatsInterceptor.isStatsActivated()) {
            final StatsInterceptor stats = new StatsInterceptor(beanContext.getBeanClass());
            beanContext.addFirstSystemInterceptor(stats);

            final ObjectNameBuilder jmxName = new ObjectNameBuilder("openejb.management");
            jmxName.set("J2EEServer", "openejb");
            jmxName.set("J2EEApplication", null);
            jmxName.set("EJBModule", beanContext.getModuleID());
            jmxName.set("MessageDrivenBean", beanContext.getEjbName());
            jmxName.set("j2eeType", "");
            jmxName.set("name", beanContext.getEjbName());

            // register the invocation stats interceptor
            try {
                final ObjectName objectName = jmxName.set("j2eeType", "Invocations").build();
                if (server.isRegistered(objectName)) {
                    server.unregisterMBean(objectName);
                }
                server.registerMBean(new ManagedMBean(stats), objectName);
                // remembered so undeploy can unregister them
                endpointFactory.jmxNames.add(objectName);
            } catch (final Exception e) {
                logger.error("Unable to register MBean ", e);
            }
        }

        // Expose InstanceLimit/InstanceCount stats through JMX
        {
            final ObjectNameBuilder jmxName = new ObjectNameBuilder("openejb.management");
            jmxName.set("J2EEServer", "openejb");
            jmxName.set("J2EEApplication", null);
            jmxName.set("EJBModule", beanContext.getModuleID());
            jmxName.set("MessageDrivenBean", beanContext.getEjbName());
            jmxName.set("j2eeType", "");
            jmxName.set("name", beanContext.getEjbName());

            try {
                final ObjectName objectName = jmxName.set("j2eeType", "Instances").build();
                if (server.isRegistered(objectName)) {
                    server.unregisterMBean(objectName);
                }
                server.registerMBean(new ManagedMBean(new InstanceMonitor(instanceFactory)), objectName);
                endpointFactory.jmxNames.add(objectName);
            } catch (final Exception e) {
                logger.error("Unable to register MBean ", e);
            }
        }

        // activate the endpoint
        CURRENT.set(beanContext);
        try {
            final MdbActivationContext activationContext = new MdbActivationContext(Thread.currentThread().getContextClassLoader(), beanContext, resourceAdapter, endpointFactory, activationSpec);
            activationContexts.put(beanContext, activationContext);

            boolean activeOnStartup = true;
            // MdbActiveOnStartup wins; DeliveryActive is the fallback spelling.
            String activeOnStartupSetting = beanContext.getActivationProperties().get("MdbActiveOnStartup");
            if (activeOnStartupSetting == null) {
                activeOnStartupSetting = beanContext.getActivationProperties().get("DeliveryActive");
            }
            if (activeOnStartupSetting != null) {
                activeOnStartup = Boolean.parseBoolean(activeOnStartupSetting);
            }

            if (activeOnStartup) {
                activationContext.start();
            } else {
                logger.info("Not auto-activating endpoint for " + beanContext.getDeploymentID());
            }

            // JMX start/stop control is opt-out: absent property means "true".
            String jmxName = beanContext.getActivationProperties().get("MdbJMXControl");
            if (jmxName == null) {
                jmxName = "true";
            }
            addJMxControl(beanContext, jmxName, activationContext);

        } catch (final ResourceException e) {
            // activation failed... clean up
            beanContext.setContainer(null);
            beanContext.setContainerData(null);
            deployments.remove(deploymentId);
            throw new OpenEJBException(e);
        } finally {
            CURRENT.remove();
        }
    }

    /**
     * Builds the {@link ActivationSpec} for a deployment from its activation properties.
     *
     * <p>Property values are passed through a {@link StringTemplate} with a context of
     * {@code ejbJarId}, {@code ejbName}, {@code appId}, {@code hostName} and
     * {@code uniqueId} (ActiveMQ's IdGenerator when available, otherwise a timestamp).
     * Unknown properties are a warning, or an error when
     * {@code failOnUnknownActivationSpec} is set. Bean Validation is applied when a
     * Validator is bound at {@code comp/Validator}.
     *
     * @throws OpenEJBException wrapping any failure to instantiate or validate the spec
     */
    // visibility to allow unit testing
    public ActivationSpec createActivationSpec(final BeanContext beanContext) throws OpenEJBException {
        try {
            // initialize the object recipe
            final ObjectRecipe objectRecipe = new ObjectRecipe(activationSpecClass);
            objectRecipe.allow(Option.IGNORE_MISSING_PROPERTIES);
            objectRecipe.disallow(Option.FIELD_INJECTION);

            final Map<String, String> activationProperties = beanContext.getActivationProperties();

            final Map<String, String> context = new HashMap<>();
            context.put("ejbJarId", beanContext.getModuleContext().getId());
            context.put("ejbName", beanContext.getEjbName());
            context.put("appId", beanContext.getModuleContext().getAppContext().getId());
            String hostname;
            try {
                hostname = InetAddress.getLocalHost().getHostName();
            } catch (UnknownHostException e) {
                hostname = "hostname-unknown";
            }
            context.put("hostName", hostname);

            String uniqueId = Long.toString(System.currentTimeMillis());
            try {
                // Prefer ActiveMQ's IdGenerator when it is on the classpath (reflective so
                // the container has no hard ActiveMQ dependency).
                Class idGen = Class.forName("org.apache.activemq.util.IdGenerator");
                final Object generator = idGen.getConstructor().newInstance();
                final Method generateId = idGen.getDeclaredMethod("generateId");
                final Object ID = generateId.invoke(generator);
                uniqueId = ID.toString();
            } catch (Exception e) {
                // ignore and use the timestamp
            }
            context.put("uniqueId", uniqueId);

            for (final Map.Entry<String, String> entry : activationProperties.entrySet()) {
                objectRecipe.setMethodProperty(entry.getKey(), new StringTemplate(entry.getValue()).apply(context));
            }
            objectRecipe.setMethodProperty("beanClass", beanContext.getBeanClass());

            // create the activationSpec
            final ActivationSpec activationSpec = (ActivationSpec) objectRecipe.create(activationSpecClass.getClassLoader());

            // verify all properties except "destination" and "destinationType" were consumed
            final Set<String> unusedProperties = new TreeSet<>(objectRecipe.getUnsetProperties().keySet());
            unusedProperties.remove("destination");
            unusedProperties.remove("destinationType");
            unusedProperties.remove("destinationLookup");
            unusedProperties.remove("connectionFactoryLookup");
            unusedProperties.remove("beanClass");
            unusedProperties.remove("MdbActiveOnStartup");
            unusedProperties.remove("MdbJMXControl");
            unusedProperties.remove("DeliveryActive");

            if (!unusedProperties.isEmpty()) {
                final String text = "No setter found for the activation spec properties: " + unusedProperties;
                if (failOnUnknownActivationSpec) {
                    throw new IllegalArgumentException(text);
                } else {
                    logger.warning(text);
                }
            }

            // validate the activation spec
            try {
                activationSpec.validate();
            } catch (final UnsupportedOperationException uoe) {
                logger.info("ActivationSpec does not support validate. Implementation of validate is optional");
            }

            // also try validating using Bean Validation if there is a Validator available in the context.
            try {
                final Validator validator = (Validator) beanContext.getJndiContext().lookup("comp/Validator");
                final Set generalSet = validator.validate(activationSpec);
                if (!generalSet.isEmpty()) {
                    throw new ConstraintViolationException("Constraint violation for ActivationSpec " + activationSpecClass.getName(), generalSet);
                }
            } catch (final NamingException e) {
                logger.debug("No Validator bound to JNDI context");
            }

            // set the resource adapter into the activation spec
            activationSpec.setResourceAdapter(resourceAdapter);

            return activationSpec;
        } catch (final Exception e) {
            throw new OpenEJBException("Unable to create activation spec", e);
        }
    }

    public void start(final BeanContext info) throws OpenEJBException {
        final EjbTimerService timerService = info.getEjbTimerService();
        if (timerService != null) {
            timerService.start();
        }
    }

    public void stop(final BeanContext info) throws OpenEJBException {
        info.stop();
    }

    /**
     * Undeploys an MDB: unregisters its JMX control and stats MBeans, deactivates the
     * endpoint on the resource adapter if it was started, and clears the container
     * bookkeeping for the deployment (always, via the finally block).
     */
    public void undeploy(final BeanContext beanContext) throws OpenEJBException {
        // instanceof on null is false, so this doubles as a null guard.
        if (!(beanContext instanceof BeanContext)) {
            return;
        }

        try {
            final EndpointFactory endpointFactory = (EndpointFactory) beanContext.getContainerData();
            if (endpointFactory != null) {
                CURRENT.set(beanContext);
                try {
                    final ObjectName jmxBeanToRemove = mbeanNames.remove(beanContext);
                    if (jmxBeanToRemove != null) {
                        LocalMBeanServer.unregisterSilently(jmxBeanToRemove);
                        logger.info("Undeployed MDB control for " + beanContext.getDeploymentID());
                    }

                    final MdbActivationContext activationContext = activationContexts.remove(beanContext);
                    if (activationContext != null && activationContext.isStarted()) {
                        resourceAdapter.endpointDeactivation(endpointFactory, endpointFactory.getActivationSpec());
                    }
                } finally {
                    CURRENT.remove();
                }

                final MBeanServer server = LocalMBeanServer.get();
                for (final ObjectName objectName : endpointFactory.jmxNames) {
                    try {
                        server.unregisterMBean(objectName);
                    } catch (final Exception e) {
                        // FIX: pass the exception so the stack trace is logged, matching
                        // the registration-failure logging in deploy().
                        logger.error("Unable to unregister MBean " + objectName, e);
                    }
                }
            }
        } finally {
            beanContext.setContainer(null);
            beanContext.setContainerData(null);
            deployments.remove(beanContext.getDeploymentID());
        }
    }

    /**
     * RpcContainer entry point: creates a bean instance, then runs the full
     * beforeDelivery / invoke / afterDelivery cycle around the method call,
     * freeing the instance afterwards.
     */
    public Object invoke(final Object deploymentId, final InterfaceType type, final Class callInterface, final Method method, final Object[] args, final Object primKey) throws OpenEJBException {
        final BeanContext beanContext = getBeanContext(deploymentId);

        final EndpointFactory endpointFactory = (EndpointFactory) beanContext.getContainerData();
        final MdbInstanceFactory instanceFactory = endpointFactory.getInstanceFactory();
        final Instance instance;
        try {
            instance = (Instance) instanceFactory.createInstance(true);
        } catch (final UnavailableException e) {
            throw new SystemException("Unable to create instance for invocation", e);
        }

        try {
            beforeDelivery(beanContext, instance, method, null);
            final Object value = invoke(instance, method, type, args);
            afterDelivery(instance);
            return value;
        } finally {
            instanceFactory.freeInstance(instance, true);
        }
    }

    /**
     * JCA beforeDelivery: enters a ThreadContext, records the delivery method, starts
     * the transaction policy for the method, and enlists the adapter's XAResource when
     * a new transaction was started here (not imported from the adapter).
     *
     * @throws SystemException if the transaction cannot be started or the resource enlisted
     */
    public void beforeDelivery(final BeanContext deployInfo, final Object instance, final Method method, final XAResource xaResource) throws SystemException {
        // intialize call context
        final ThreadContext callContext = new ThreadContext(deployInfo, null);
        final ThreadContext oldContext = ThreadContext.enter(callContext);

        // create mdb context
        final MdbCallContext mdbCallContext = new MdbCallContext();
        callContext.set(MdbCallContext.class, mdbCallContext);
        mdbCallContext.deliveryMethod = method;
        mdbCallContext.oldCallContext = oldContext;

        // call the tx before method
        try {
            mdbCallContext.txPolicy = createTransactionPolicy(deployInfo.getTransactionType(method), callContext);

            // if we have an xaResource and a transaction was not imported from the adapter, enlist the xaResource
            if (xaResource != null && mdbCallContext.txPolicy.isNewTransaction()) {
                mdbCallContext.txPolicy.enlistResource(xaResource);
            }
        } catch (final ApplicationException e) {
            ThreadContext.exit(oldContext);
            throw new SystemException("Should never get an Application exception", e);
        } catch (final SystemException e) {
            ThreadContext.exit(oldContext);
            throw e;
        } catch (final Exception e) {
            ThreadContext.exit(oldContext);
            throw new SystemException("Unable to enlist xa resource in the transaction", e);
        }
    }

    /**
     * Delivers a message to the bean instance. Must be bracketed by
     * {@link #beforeDelivery} / {@link #afterDelivery}; verifies the invoked method
     * matches the one announced in beforeDelivery (name + parameter types).
     */
    public Object invoke(final Object instance, final Method method, final InterfaceType type, Object... args) throws SystemException, ApplicationException {
        if (args == null) {
            args = NO_ARGS;
        }

        // get the context data
        final ThreadContext callContext = ThreadContext.getThreadContext();
        final BeanContext deployInfo = callContext.getBeanContext();
        final MdbCallContext mdbCallContext = callContext.get(MdbCallContext.class);

        if (mdbCallContext == null) {
            throw new IllegalStateException("beforeDelivery was not called");
        }

        // verify the delivery method passed to beforeDeliver is the same method that was invoked
        if (!mdbCallContext.deliveryMethod.getName().equals(method.getName()) ||
            !Arrays.deepEquals(mdbCallContext.deliveryMethod.getParameterTypes(), method.getParameterTypes())) {
            throw new IllegalStateException("Delivery method specified in beforeDelivery is not the delivery method called");
        }

        // remember the return value or exception so it can be logged
        Object returnValue = null;
        OpenEJBException openEjbException = null;
        final Operation oldOperation = callContext.getCurrentOperation();
        callContext.setCurrentOperation(type == InterfaceType.TIMEOUT ? Operation.TIMEOUT : Operation.BUSINESS);
        try {
            if (logger.isDebugEnabled()) {
                // FIX: was logger.info inside an isDebugEnabled guard; use debug to match
                // the guard and the paired "finished invoking" message below.
                logger.debug("invoking method " + method.getName() + " on " + deployInfo.getDeploymentID());
            }

            // determine the target method on the bean instance class
            final Method targetMethod = deployInfo.getMatchingBeanMethod(method);
            callContext.set(Method.class, targetMethod);

            // invoke the target method
            returnValue = _invoke(instance, targetMethod, args, deployInfo, type, mdbCallContext);
            return returnValue;
        } catch (final ApplicationException | SystemException e) {
            openEjbException = e;
            throw e;
        } finally {
            callContext.setCurrentOperation(oldOperation);
            // Log the invocation results
            if (logger.isDebugEnabled()) {
                if (openEjbException == null) {
                    logger.debug("finished invoking method " + method.getName() + ". Return value:" + returnValue);
                } else {
                    final Throwable exception = openEjbException.getRootCause() != null ? openEjbException.getRootCause() : openEjbException;
                    logger.debug("finished invoking method " + method.getName() + " with exception " + exception);
                }
            }
        }
    }

    /**
     * Runs the interceptor stack for the target method and routes any thrown
     * exception through the transaction policy (system vs. application exception).
     * Both handlers rethrow, so control never reaches the trailing AssertionError.
     */
    private Object _invoke(final Object instance, final Method runMethod, final Object[] args, final BeanContext beanContext, final InterfaceType interfaceType, final MdbCallContext mdbCallContext) throws SystemException, ApplicationException {
        final Object returnValue;
        try {
            final List<InterceptorData> interceptors = beanContext.getMethodInterceptors(runMethod);
            final InterceptorStack interceptorStack = new InterceptorStack(((Instance) instance).bean, runMethod, interfaceType == InterfaceType.TIMEOUT ? Operation.TIMEOUT : Operation.BUSINESS,
                interceptors, ((Instance) instance).interceptors);
            returnValue = interceptorStack.invoke(args);
            return returnValue;
        } catch (Throwable e) {
            // unwrap invocation target exception
            if (e instanceof InvocationTargetException) {
                e = ((InvocationTargetException) e).getTargetException();
            }

            // Any exception thrown by reflection; not by the enterprise bean. Possible
            // Exceptions are:
            // IllegalAccessException - if the underlying method is inaccessible.
            // IllegalArgumentException - if the number of actual and formal parameters differ, or if an unwrapping conversion fails.
            // NullPointerException - if the specified object is null and the method is an instance method.
            // ExceptionInInitializerError - if the initialization provoked by this method fails.
            final ExceptionType type = beanContext.getExceptionType(e);
            if (type == ExceptionType.SYSTEM) {
                // /// System Exception ****************************
                handleSystemException(mdbCallContext.txPolicy, e, ThreadContext.getThreadContext());
            } else {
                // // Application Exception ***********************
                handleApplicationException(mdbCallContext.txPolicy, e, false);
            }
        }
        throw new AssertionError("Should not get here");
    }

    /**
     * JCA afterDelivery: completes the transaction started in beforeDelivery and
     * restores the previous ThreadContext (always, via the finally block).
     */
    public void afterDelivery(final Object instance) throws SystemException {
        // get the mdb call context
        final ThreadContext callContext = ThreadContext.getThreadContext();
        final MdbCallContext mdbCallContext = callContext.get(MdbCallContext.class);

        // invoke the tx after method
        try {
            afterInvoke(mdbCallContext.txPolicy, callContext);
        } catch (final ApplicationException e) {
            throw new SystemException("Should never get an Application exception", e);
        } finally {
            ThreadContext.exit(mdbCallContext.oldCallContext);
        }
    }

    /**
     * Releases a message endpoint instance outside the normal delivery cycle,
     * completing any in-flight transaction and freeing the instance. Enters a
     * temporary ThreadContext if none is active on the calling thread.
     */
    public void release(final BeanContext deployInfo, final Object instance) {
        // get the mdb call context
        ThreadContext callContext = ThreadContext.getThreadContext();
        boolean contextExitRequired = false;
        if (callContext == null) {
            callContext = new ThreadContext(deployInfo, null);
            ThreadContext.enter(callContext);
            contextExitRequired = true;
        }
        try {
            // if we have an mdb call context we need to invoke the after invoke method
            final MdbCallContext mdbCallContext = callContext.get(MdbCallContext.class);
            if (mdbCallContext != null) {
                try {
                    afterInvoke(mdbCallContext.txPolicy, callContext);
                } catch (final Exception e) {
                    logger.error("error while releasing message endpoint", e);
                } finally {
                    final EndpointFactory endpointFactory = (EndpointFactory) deployInfo.getContainerData();
                    endpointFactory.getInstanceFactory().freeInstance((Instance) instance, false);
                }
            }
        } finally {
            if (contextExitRequired) {
                ThreadContext.exit(callContext);
            }
        }
    }

    /**
     * Registers the start/stop JMX control MBean for a deployment.
     *
     * @param name "false" disables the control, "true" uses the default ObjectName,
     *             anything else is parsed as an explicit ObjectName
     */
    private void addJMxControl(final BeanContext current, final String name, final MdbActivationContext activationContext) throws ResourceException {
        if (name == null || "false".equalsIgnoreCase(name)) {
            logger.debug("Not adding JMX control for " + current.getDeploymentID());
            return;
        }

        final ObjectName jmxName;
        try {
            // NOTE(review): the key below says "StatelessSessionBean" although this is an
            // MDB container — looks like a copy from the stateless container. Kept as-is
            // because existing tooling may address the control by this name; confirm
            // upstream before changing.
            jmxName = "true".equalsIgnoreCase(name) ? new ObjectNameBuilder()
                .set("J2EEServer", "openejb")
                .set("J2EEApplication", null)
                .set("EJBModule", current.getModuleID())
                .set("StatelessSessionBean", current.getEjbName())
                .set("j2eeType", "control")
                .set("name", current.getEjbName())
                .build() : new ObjectName(name);
        } catch (final MalformedObjectNameException e) {
            throw new IllegalArgumentException(e);
        }
        mbeanNames.put(current, jmxName);

        LocalMBeanServer.registerSilently(new MdbJmxControl(activationContext), jmxName);
        logger.info("Deployed MDB control for " + current.getDeploymentID() + " on " + jmxName);
    }

    /** Returns the BeanContext being (de)activated on the current thread, or null. */
    public static BeanContext current() {
        final BeanContext beanContext = CURRENT.get();
        if (beanContext == null) {
            CURRENT.remove();
        }
        return beanContext;
    }

    /** Per-delivery state carried on the thread between beforeDelivery and afterDelivery. */
    private static class MdbCallContext {
        private Method deliveryMethod;
        private TransactionPolicy txPolicy;
        private ThreadContext oldCallContext;
    }

    /**
     * Holds everything needed to (de)activate one endpoint on the resource adapter,
     * with idempotent start/stop guarded by an AtomicBoolean. The deployment's
     * classloader is installed as TCCL around adapter callbacks.
     */
    private static class MdbActivationContext {
        private final ClassLoader classLoader;
        private final BeanContext beanContext;
        private final ResourceAdapter resourceAdapter;
        private final EndpointFactory endpointFactory;
        private final ActivationSpec activationSpec;

        private final AtomicBoolean started = new AtomicBoolean(false);

        public MdbActivationContext(final ClassLoader classLoader, final BeanContext beanContext, final ResourceAdapter resourceAdapter, final EndpointFactory endpointFactory, final ActivationSpec activationSpec) {
            this.classLoader = classLoader;
            this.beanContext = beanContext;
            this.resourceAdapter = resourceAdapter;
            this.endpointFactory = endpointFactory;
            this.activationSpec = activationSpec;
        }

        public ResourceAdapter getResourceAdapter() {
            return resourceAdapter;
        }

        public EndpointFactory getEndpointFactory() {
            return endpointFactory;
        }

        public ActivationSpec getActivationSpec() {
            return activationSpec;
        }

        public boolean isStarted() {
            return started.get();
        }

        public void start() throws ResourceException {
            // compareAndSet makes start() a no-op if already started
            if (!started.compareAndSet(false, true)) {
                return;
            }

            final ClassLoader oldCl = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(classLoader);
                resourceAdapter.endpointActivation(endpointFactory, activationSpec);
                logger.info("Activated endpoint for " + beanContext.getDeploymentID());
            } finally {
                Thread.currentThread().setContextClassLoader(oldCl);
            }
        }

        public void stop() {
            // compareAndSet makes stop() a no-op if not started
            if (!started.compareAndSet(true, false)) {
                return;
            }

            final ClassLoader oldCl = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(classLoader);
                resourceAdapter.endpointDeactivation(endpointFactory, activationSpec);
                logger.info("Deactivated endpoint for " + beanContext.getDeploymentID());
            } finally {
                Thread.currentThread().setContextClassLoader(oldCl);
            }
        }
    }

    /**
     * DynamicMBean exposing start/stop operations and a "started" attribute for one
     * MDB endpoint, backed by its MdbActivationContext.
     */
    public static final class MdbJmxControl implements DynamicMBean {
        private static final AttributeList ATTRIBUTE_LIST = new AttributeList();
        private static final MBeanInfo INFO = new MBeanInfo(
            "org.apache.openejb.resource.activemq.ActiveMQResourceAdapter.MdbJmxControl",
            "Allows to control a MDB (start/stop)",
            new MBeanAttributeInfo[]{
                new MBeanAttributeInfo("started", "boolean", "started: boolean indicating whether this MDB endpoint has been activated.", true, false, true)
            },
            new MBeanConstructorInfo[0],
            new MBeanOperationInfo[]{
                new MBeanOperationInfo("start", "Ensure the listener is active.", new MBeanParameterInfo[0], "void", ACTION),
                new MBeanOperationInfo("stop", "Ensure the listener is not active.", new MBeanParameterInfo[0], "void", ACTION)
            },
            new MBeanNotificationInfo[0]);

        private final MdbActivationContext activationContext;

        private MdbJmxControl(final MdbActivationContext activationContext) {
            this.activationContext = activationContext;
        }

        @Override
        public Object invoke(final String actionName, final Object[] params, final String[] signature) throws MBeanException, ReflectionException {
            if (actionName.equals("stop")) {
                activationContext.stop();
            } else if (actionName.equals("start")) {
                try {
                    activationContext.start();
                } catch (ResourceException e) {
                    // FIX: log the exception itself (not just the message) so the stack
                    // trace is preserved; the cause is also propagated to the JMX caller.
                    logger.error("Error invoking " + actionName + ": " + e.getMessage(), e);
                    throw new MBeanException(new IllegalStateException(e.getMessage(), e));
                }
            } else {
                throw new MBeanException(new IllegalStateException("unsupported operation: " + actionName));
            }
            return null;
        }

        @Override
        public MBeanInfo getMBeanInfo() {
            return INFO;
        }

        @Override
        public Object getAttribute(final String attribute) throws AttributeNotFoundException, MBeanException, ReflectionException {
            if ("started".equals(attribute)) {
                return activationContext.isStarted();
            }
            throw new AttributeNotFoundException();
        }

        @Override
        public void setAttribute(final Attribute attribute) throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
            throw new AttributeNotFoundException();
        }

        @Override
        public AttributeList getAttributes(final String[] attributes) {
            return ATTRIBUTE_LIST;
        }

        @Override
        public AttributeList setAttributes(final AttributeList attributes) {
            return ATTRIBUTE_LIST;
        }
    }

    /** JMX view over the instance factory exposing the InstanceLimit/InstanceCount stats. */
    public static class InstanceMonitor {
        private final MdbInstanceFactory instanceFactory;

        public InstanceMonitor(MdbInstanceFactory instanceFactory) {
            this.instanceFactory = instanceFactory;
        }

        @Managed
        public int getInstanceLimit() {
            return instanceFactory.getInstanceLimit();
        }

        @Managed
        public int getInstanceCount() {
            return instanceFactory.getInstanceCount();
        }
    }
}
googleads/google-ads-java
35,309
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/KeywordPlanForecastPeriod.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/resources/keyword_plan.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.resources; /** * <pre> * The forecasting period associated with the keyword plan. * </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.KeywordPlanForecastPeriod} */ public final class KeywordPlanForecastPeriod extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.resources.KeywordPlanForecastPeriod) KeywordPlanForecastPeriodOrBuilder { private static final long serialVersionUID = 0L; // Use KeywordPlanForecastPeriod.newBuilder() to construct. private KeywordPlanForecastPeriod(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private KeywordPlanForecastPeriod() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new KeywordPlanForecastPeriod(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.KeywordPlanProto.internal_static_google_ads_googleads_v19_resources_KeywordPlanForecastPeriod_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.KeywordPlanProto.internal_static_google_ads_googleads_v19_resources_KeywordPlanForecastPeriod_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod.class, com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod.Builder.class); } private int intervalCase_ = 0; @SuppressWarnings("serial") private java.lang.Object interval_; public enum IntervalCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { DATE_INTERVAL(1), 
DATE_RANGE(2), INTERVAL_NOT_SET(0); private final int value; private IntervalCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static IntervalCase valueOf(int value) { return forNumber(value); } public static IntervalCase forNumber(int value) { switch (value) { case 1: return DATE_INTERVAL; case 2: return DATE_RANGE; case 0: return INTERVAL_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public IntervalCase getIntervalCase() { return IntervalCase.forNumber( intervalCase_); } public static final int DATE_INTERVAL_FIELD_NUMBER = 1; /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return Whether the dateInterval field is set. */ public boolean hasDateInterval() { return intervalCase_ == 1; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The enum numeric value on the wire for dateInterval. */ public int getDateIntervalValue() { if (intervalCase_ == 1) { return (java.lang.Integer) interval_; } return 0; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The dateInterval. 
*/ public com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval getDateInterval() { if (intervalCase_ == 1) { com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval result = com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.forNumber( (java.lang.Integer) interval_); return result == null ? com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNRECOGNIZED : result; } return com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNSPECIFIED; } public static final int DATE_RANGE_FIELD_NUMBER = 2; /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code> * @return Whether the dateRange field is set. */ @java.lang.Override public boolean hasDateRange() { return intervalCase_ == 2; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code> * @return The dateRange. */ @java.lang.Override public com.google.ads.googleads.v19.common.DateRange getDateRange() { if (intervalCase_ == 2) { return (com.google.ads.googleads.v19.common.DateRange) interval_; } return com.google.ads.googleads.v19.common.DateRange.getDefaultInstance(); } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. 
Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v19.common.DateRangeOrBuilder getDateRangeOrBuilder() { if (intervalCase_ == 2) { return (com.google.ads.googleads.v19.common.DateRange) interval_; } return com.google.ads.googleads.v19.common.DateRange.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (intervalCase_ == 1) { output.writeEnum(1, ((java.lang.Integer) interval_)); } if (intervalCase_ == 2) { output.writeMessage(2, (com.google.ads.googleads.v19.common.DateRange) interval_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (intervalCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, ((java.lang.Integer) interval_)); } if (intervalCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.ads.googleads.v19.common.DateRange) interval_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod)) { return super.equals(obj); } com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod other = (com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod) obj; if 
(!getIntervalCase().equals(other.getIntervalCase())) return false;
    // Compare only the oneof case that is actually populated.
    switch (intervalCase_) {
      case 1:
        if (getDateIntervalValue()
            != other.getDateIntervalValue()) return false;
        break;
      case 2:
        if (!getDateRange()
            .equals(other.getDateRange())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // 41/19/37/53/29 are the protoc-generated mixing constants; kept so hashes
    // stay stable across regenerations.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (intervalCase_) {
      case 1:
        hash = (37 * hash) + DATE_INTERVAL_FIELD_NUMBER;
        hash = (53 * hash) + getDateIntervalValue();
        break;
      case 2:
        hash = (37 * hash) + DATE_RANGE_FIELD_NUMBER;
        hash = (53 * hash) + getDateRange().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points; all delegate to PARSER (or the
  // GeneratedMessageV3 IOException-translating helpers for stream inputs).
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(byte[]
      data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return
com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Identity check avoids an unnecessary mergeFrom when starting from the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * The forecasting period associated with the keyword plan.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.resources.KeywordPlanForecastPeriod}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.resources.KeywordPlanForecastPeriod)
      com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriodOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.resources.KeywordPlanProto.internal_static_google_ads_googleads_v19_resources_KeywordPlanForecastPeriod_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.resources.KeywordPlanProto.internal_static_google_ads_googleads_v19_resources_KeywordPlanForecastPeriod_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod.class, com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod.Builder.class);
    }

    // Construct using com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (dateRangeBuilder_ != null) {
        dateRangeBuilder_.clear();
      }
      // Reset the oneof to the not-set state.
      intervalCase_ = 0;
      interval_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.resources.KeywordPlanProto.internal_static_google_ads_googleads_v19_resources_KeywordPlanForecastPeriod_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod build() {
      com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod buildPartial() {
      com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod result = new com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // No non-oneof fields in this message, so this is an intentional no-op;
    // protoc emits it for structural uniformity.
    private void buildPartial0(com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod result) {
      int from_bitField0_ = bitField0_;
    }

    private void buildPartialOneofs(com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod result) {
      result.intervalCase_ = intervalCase_;
      result.interval_ = this.interval_;
      // If the date_range case is backed by a nested builder, materialize it.
      if (intervalCase_ == 2 &&
          dateRangeBuilder_ != null) {
        result.interval_ = dateRangeBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed merge when possible; fall back to reflective merge.
      if (other instanceof com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod) {
        return mergeFrom((com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod other) {
      if (other == com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod.getDefaultInstance()) return this;
      // Only the populated oneof case of `other` is merged in.
      switch (other.getIntervalCase()) {
        case DATE_INTERVAL: {
          setDateIntervalValue(other.getDateIntervalValue());
          break;
        }
        case DATE_RANGE: {
          mergeDateRange(other.getDateRange());
          break;
        }
        case INTERVAL_NOT_SET: {
          break;
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tags: 8 = field 1 as varint (enum), 18 = field 2 length-delimited (message).
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              int rawValue = input.readEnum();
              intervalCase_ = 1;
              interval_ = rawValue;
              break;
            } // case 8
            case 18: {
              input.readMessage(
                  getDateRangeFieldBuilder().getBuilder(),
                  extensionRegistry);
              intervalCase_ = 2;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Which oneof case is set: 0 = none, 1 = date_interval, 2 = date_range.
    private int intervalCase_ = 0;
    // Holds java.lang.Integer (enum wire value) or DateRange, per intervalCase_.
    private java.lang.Object interval_;
    public IntervalCase
        getIntervalCase() {
      return IntervalCase.forNumber(
          intervalCase_);
    }

    public Builder clearInterval() {
      intervalCase_ = 0;
      interval_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    /**
     * <pre>
     * A future date range relative to the current date used for forecasting.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code>
     * @return Whether the dateInterval field is set.
     */
    @java.lang.Override
    public boolean hasDateInterval() {
      return intervalCase_ == 1;
    }
    /**
     * <pre>
     * A future date range relative to the current date used for forecasting.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code>
     * @return The enum numeric value on the wire for dateInterval.
     */
    @java.lang.Override
    public int getDateIntervalValue() {
      if (intervalCase_ == 1) {
        return ((java.lang.Integer) interval_).intValue();
      }
      return 0;
    }
    /**
     * <pre>
     * A future date range relative to the current date used for forecasting.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code>
     * @param value The enum numeric value on the wire for dateInterval to set.
     * @return This builder for chaining.
     */
    public Builder setDateIntervalValue(int value) {
      intervalCase_ = 1;
      interval_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * A future date range relative to the current date used for forecasting.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code>
     * @return The dateInterval.
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval getDateInterval() {
      if (intervalCase_ == 1) {
        com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval result = com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.forNumber(
            (java.lang.Integer) interval_);
        return result == null ? com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNRECOGNIZED : result;
      }
      return com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNSPECIFIED;
    }
    /**
     * <pre>
     * A future date range relative to the current date used for forecasting.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code>
     * @param value The dateInterval to set.
     * @return This builder for chaining.
     */
    public Builder setDateInterval(com.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval value) {
      if (value == null) {
        throw new NullPointerException();
      }
      intervalCase_ = 1;
      interval_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * A future date range relative to the current date used for forecasting.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code>
     * @return This builder for chaining.
*/
    public Builder clearDateInterval() {
      if (intervalCase_ == 1) {
        intervalCase_ = 0;
        interval_ = null;
        onChanged();
      }
      return this;
    }

    // Lazily created; while present, the date_range oneof case is delegated to it.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v19.common.DateRange, com.google.ads.googleads.v19.common.DateRange.Builder, com.google.ads.googleads.v19.common.DateRangeOrBuilder> dateRangeBuilder_;
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     * @return Whether the dateRange field is set.
     */
    @java.lang.Override
    public boolean hasDateRange() {
      return intervalCase_ == 2;
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     * @return The dateRange.
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.common.DateRange getDateRange() {
      if (dateRangeBuilder_ == null) {
        if (intervalCase_ == 2) {
          return (com.google.ads.googleads.v19.common.DateRange) interval_;
        }
        return com.google.ads.googleads.v19.common.DateRange.getDefaultInstance();
      } else {
        if (intervalCase_ == 2) {
          return dateRangeBuilder_.getMessage();
        }
        return com.google.ads.googleads.v19.common.DateRange.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     */
    public Builder setDateRange(com.google.ads.googleads.v19.common.DateRange value) {
      if (dateRangeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        interval_ = value;
        onChanged();
      } else {
        dateRangeBuilder_.setMessage(value);
      }
      // Setting date_range switches the oneof case (clearing any date_interval).
      intervalCase_ = 2;
      return this;
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     */
    public Builder setDateRange(
        com.google.ads.googleads.v19.common.DateRange.Builder builderForValue) {
      if (dateRangeBuilder_ == null) {
        interval_ = builderForValue.build();
        onChanged();
      } else {
        dateRangeBuilder_.setMessage(builderForValue.build());
      }
      intervalCase_ = 2;
      return this;
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     */
    public Builder mergeDateRange(com.google.ads.googleads.v19.common.DateRange value) {
      if (dateRangeBuilder_ == null) {
        // Reference comparison against the shared default instance is the
        // generated pattern for "already holds a real value": merge then,
        // otherwise just adopt `value`.
        if (intervalCase_ == 2 &&
            interval_ != com.google.ads.googleads.v19.common.DateRange.getDefaultInstance()) {
          interval_ = com.google.ads.googleads.v19.common.DateRange.newBuilder((com.google.ads.googleads.v19.common.DateRange) interval_)
              .mergeFrom(value).buildPartial();
        } else {
          interval_ = value;
        }
        onChanged();
      } else {
        if (intervalCase_ == 2) {
          dateRangeBuilder_.mergeFrom(value);
        } else {
          dateRangeBuilder_.setMessage(value);
        }
      }
      intervalCase_ = 2;
      return this;
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     */
    public Builder clearDateRange() {
      if (dateRangeBuilder_ == null) {
        if (intervalCase_ == 2) {
          intervalCase_ = 0;
          interval_ = null;
          onChanged();
        }
      } else {
        if (intervalCase_ == 2) {
          intervalCase_ = 0;
          interval_ = null;
        }
        dateRangeBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     */
    public com.google.ads.googleads.v19.common.DateRange.Builder getDateRangeBuilder() {
      return getDateRangeFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future.
Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.common.DateRangeOrBuilder getDateRangeOrBuilder() {
      if ((intervalCase_ == 2) && (dateRangeBuilder_ != null)) {
        return dateRangeBuilder_.getMessageOrBuilder();
      } else {
        if (intervalCase_ == 2) {
          return (com.google.ads.googleads.v19.common.DateRange) interval_;
        }
        return com.google.ads.googleads.v19.common.DateRange.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * The custom date range used for forecasting. It cannot be greater than
     * a year.
     * The start and end dates must be in the future. Otherwise, an error will
     * be returned when the forecasting action is performed.
     * The start and end dates are inclusive.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.DateRange date_range = 2;</code>
     */
    // NOTE: creating the field builder also selects the date_range oneof case
    // (intervalCase_ = 2) as a side effect — relied on by mergeFrom(CodedInputStream).
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v19.common.DateRange, com.google.ads.googleads.v19.common.DateRange.Builder, com.google.ads.googleads.v19.common.DateRangeOrBuilder>
        getDateRangeFieldBuilder() {
      if (dateRangeBuilder_ == null) {
        if (!(intervalCase_ == 2)) {
          interval_ = com.google.ads.googleads.v19.common.DateRange.getDefaultInstance();
        }
        dateRangeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v19.common.DateRange, com.google.ads.googleads.v19.common.DateRange.Builder, com.google.ads.googleads.v19.common.DateRangeOrBuilder>(
                (com.google.ads.googleads.v19.common.DateRange) interval_,
                getParentForChildren(),
                isClean());
        interval_ = null;
      }
      intervalCase_ = 2;
      onChanged();
      return dateRangeBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.resources.KeywordPlanForecastPeriod)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.resources.KeywordPlanForecastPeriod)
  private static final com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod();
  }

  public static com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<KeywordPlanForecastPeriod>
      PARSER = new com.google.protobuf.AbstractParser<KeywordPlanForecastPeriod>() {
    @java.lang.Override
    public KeywordPlanForecastPeriod parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect what was read.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<KeywordPlanForecastPeriod> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<KeywordPlanForecastPeriod> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.resources.KeywordPlanForecastPeriod getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
googleads/google-ads-java
35,309
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/KeywordPlanForecastPeriod.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/resources/keyword_plan.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.resources; /** * <pre> * The forecasting period associated with the keyword plan. * </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.KeywordPlanForecastPeriod} */ public final class KeywordPlanForecastPeriod extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.resources.KeywordPlanForecastPeriod) KeywordPlanForecastPeriodOrBuilder { private static final long serialVersionUID = 0L; // Use KeywordPlanForecastPeriod.newBuilder() to construct. private KeywordPlanForecastPeriod(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private KeywordPlanForecastPeriod() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new KeywordPlanForecastPeriod(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.KeywordPlanProto.internal_static_google_ads_googleads_v20_resources_KeywordPlanForecastPeriod_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.KeywordPlanProto.internal_static_google_ads_googleads_v20_resources_KeywordPlanForecastPeriod_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod.class, com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod.Builder.class); } private int intervalCase_ = 0; @SuppressWarnings("serial") private java.lang.Object interval_; public enum IntervalCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { DATE_INTERVAL(1), 
DATE_RANGE(2), INTERVAL_NOT_SET(0); private final int value; private IntervalCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static IntervalCase valueOf(int value) { return forNumber(value); } public static IntervalCase forNumber(int value) { switch (value) { case 1: return DATE_INTERVAL; case 2: return DATE_RANGE; case 0: return INTERVAL_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public IntervalCase getIntervalCase() { return IntervalCase.forNumber( intervalCase_); } public static final int DATE_INTERVAL_FIELD_NUMBER = 1; /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return Whether the dateInterval field is set. */ public boolean hasDateInterval() { return intervalCase_ == 1; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The enum numeric value on the wire for dateInterval. */ public int getDateIntervalValue() { if (intervalCase_ == 1) { return (java.lang.Integer) interval_; } return 0; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The dateInterval. 
*/ public com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval getDateInterval() { if (intervalCase_ == 1) { com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval result = com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.forNumber( (java.lang.Integer) interval_); return result == null ? com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNRECOGNIZED : result; } return com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNSPECIFIED; } public static final int DATE_RANGE_FIELD_NUMBER = 2; /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> * @return Whether the dateRange field is set. */ @java.lang.Override public boolean hasDateRange() { return intervalCase_ == 2; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> * @return The dateRange. */ @java.lang.Override public com.google.ads.googleads.v20.common.DateRange getDateRange() { if (intervalCase_ == 2) { return (com.google.ads.googleads.v20.common.DateRange) interval_; } return com.google.ads.googleads.v20.common.DateRange.getDefaultInstance(); } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. 
Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v20.common.DateRangeOrBuilder getDateRangeOrBuilder() { if (intervalCase_ == 2) { return (com.google.ads.googleads.v20.common.DateRange) interval_; } return com.google.ads.googleads.v20.common.DateRange.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (intervalCase_ == 1) { output.writeEnum(1, ((java.lang.Integer) interval_)); } if (intervalCase_ == 2) { output.writeMessage(2, (com.google.ads.googleads.v20.common.DateRange) interval_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (intervalCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, ((java.lang.Integer) interval_)); } if (intervalCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.ads.googleads.v20.common.DateRange) interval_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod)) { return super.equals(obj); } com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod other = (com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod) obj; if 
(!getIntervalCase().equals(other.getIntervalCase())) return false; switch (intervalCase_) { case 1: if (getDateIntervalValue() != other.getDateIntervalValue()) return false; break; case 2: if (!getDateRange() .equals(other.getDateRange())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (intervalCase_) { case 1: hash = (37 * hash) + DATE_INTERVAL_FIELD_NUMBER; hash = (53 * hash) + getDateIntervalValue(); break; case 2: hash = (37 * hash) + DATE_RANGE_FIELD_NUMBER; hash = (53 * hash) + getDateRange().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom(byte[] 
data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The forecasting period associated with the keyword plan. * </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.KeywordPlanForecastPeriod} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.resources.KeywordPlanForecastPeriod) com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriodOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.KeywordPlanProto.internal_static_google_ads_googleads_v20_resources_KeywordPlanForecastPeriod_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.KeywordPlanProto.internal_static_google_ads_googleads_v20_resources_KeywordPlanForecastPeriod_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod.class, com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod.Builder.class); } // Construct using com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod.newBuilder() 
private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (dateRangeBuilder_ != null) { dateRangeBuilder_.clear(); } intervalCase_ = 0; interval_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.resources.KeywordPlanProto.internal_static_google_ads_googleads_v20_resources_KeywordPlanForecastPeriod_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod getDefaultInstanceForType() { return com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod build() { com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod buildPartial() { com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod result = new com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod result) { result.intervalCase_ = intervalCase_; result.interval_ = this.interval_; if (intervalCase_ == 2 && dateRangeBuilder_ != null) { result.interval_ = dateRangeBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod) { return mergeFrom((com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod other) { if (other == com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod.getDefaultInstance()) return this; switch (other.getIntervalCase()) { case DATE_INTERVAL: { setDateIntervalValue(other.getDateIntervalValue()); break; } case DATE_RANGE: { mergeDateRange(other.getDateRange()); break; } case INTERVAL_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = 
input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); intervalCase_ = 1; interval_ = rawValue; break; } // case 8 case 18: { input.readMessage( getDateRangeFieldBuilder().getBuilder(), extensionRegistry); intervalCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int intervalCase_ = 0; private java.lang.Object interval_; public IntervalCase getIntervalCase() { return IntervalCase.forNumber( intervalCase_); } public Builder clearInterval() { intervalCase_ = 0; interval_ = null; onChanged(); return this; } private int bitField0_; /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return Whether the dateInterval field is set. */ @java.lang.Override public boolean hasDateInterval() { return intervalCase_ == 1; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The enum numeric value on the wire for dateInterval. */ @java.lang.Override public int getDateIntervalValue() { if (intervalCase_ == 1) { return ((java.lang.Integer) interval_).intValue(); } return 0; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @param value The enum numeric value on the wire for dateInterval to set. 
* @return This builder for chaining. */ public Builder setDateIntervalValue(int value) { intervalCase_ = 1; interval_ = value; onChanged(); return this; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The dateInterval. */ @java.lang.Override public com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval getDateInterval() { if (intervalCase_ == 1) { com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval result = com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.forNumber( (java.lang.Integer) interval_); return result == null ? com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNRECOGNIZED : result; } return com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNSPECIFIED; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @param value The dateInterval to set. * @return This builder for chaining. */ public Builder setDateInterval(com.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval value) { if (value == null) { throw new NullPointerException(); } intervalCase_ = 1; interval_ = value.getNumber(); onChanged(); return this; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v20.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return This builder for chaining. 
*/ public Builder clearDateInterval() { if (intervalCase_ == 1) { intervalCase_ = 0; interval_ = null; onChanged(); } return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.DateRange, com.google.ads.googleads.v20.common.DateRange.Builder, com.google.ads.googleads.v20.common.DateRangeOrBuilder> dateRangeBuilder_; /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> * @return Whether the dateRange field is set. */ @java.lang.Override public boolean hasDateRange() { return intervalCase_ == 2; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> * @return The dateRange. */ @java.lang.Override public com.google.ads.googleads.v20.common.DateRange getDateRange() { if (dateRangeBuilder_ == null) { if (intervalCase_ == 2) { return (com.google.ads.googleads.v20.common.DateRange) interval_; } return com.google.ads.googleads.v20.common.DateRange.getDefaultInstance(); } else { if (intervalCase_ == 2) { return dateRangeBuilder_.getMessage(); } return com.google.ads.googleads.v20.common.DateRange.getDefaultInstance(); } } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. 
* </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ public Builder setDateRange(com.google.ads.googleads.v20.common.DateRange value) { if (dateRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } interval_ = value; onChanged(); } else { dateRangeBuilder_.setMessage(value); } intervalCase_ = 2; return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ public Builder setDateRange( com.google.ads.googleads.v20.common.DateRange.Builder builderForValue) { if (dateRangeBuilder_ == null) { interval_ = builderForValue.build(); onChanged(); } else { dateRangeBuilder_.setMessage(builderForValue.build()); } intervalCase_ = 2; return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. 
* </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ public Builder mergeDateRange(com.google.ads.googleads.v20.common.DateRange value) { if (dateRangeBuilder_ == null) { if (intervalCase_ == 2 && interval_ != com.google.ads.googleads.v20.common.DateRange.getDefaultInstance()) { interval_ = com.google.ads.googleads.v20.common.DateRange.newBuilder((com.google.ads.googleads.v20.common.DateRange) interval_) .mergeFrom(value).buildPartial(); } else { interval_ = value; } onChanged(); } else { if (intervalCase_ == 2) { dateRangeBuilder_.mergeFrom(value); } else { dateRangeBuilder_.setMessage(value); } } intervalCase_ = 2; return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ public Builder clearDateRange() { if (dateRangeBuilder_ == null) { if (intervalCase_ == 2) { intervalCase_ = 0; interval_ = null; onChanged(); } } else { if (intervalCase_ == 2) { intervalCase_ = 0; interval_ = null; } dateRangeBuilder_.clear(); } return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ public com.google.ads.googleads.v20.common.DateRange.Builder getDateRangeBuilder() { return getDateRangeFieldBuilder().getBuilder(); } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. 
Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v20.common.DateRangeOrBuilder getDateRangeOrBuilder() { if ((intervalCase_ == 2) && (dateRangeBuilder_ != null)) { return dateRangeBuilder_.getMessageOrBuilder(); } else { if (intervalCase_ == 2) { return (com.google.ads.googleads.v20.common.DateRange) interval_; } return com.google.ads.googleads.v20.common.DateRange.getDefaultInstance(); } } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v20.common.DateRange date_range = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.DateRange, com.google.ads.googleads.v20.common.DateRange.Builder, com.google.ads.googleads.v20.common.DateRangeOrBuilder> getDateRangeFieldBuilder() { if (dateRangeBuilder_ == null) { if (!(intervalCase_ == 2)) { interval_ = com.google.ads.googleads.v20.common.DateRange.getDefaultInstance(); } dateRangeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.DateRange, com.google.ads.googleads.v20.common.DateRange.Builder, com.google.ads.googleads.v20.common.DateRangeOrBuilder>( (com.google.ads.googleads.v20.common.DateRange) interval_, getParentForChildren(), isClean()); interval_ = null; } intervalCase_ = 2; onChanged(); return dateRangeBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.resources.KeywordPlanForecastPeriod) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.resources.KeywordPlanForecastPeriod) private static final com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod(); } public static com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<KeywordPlanForecastPeriod> PARSER = new com.google.protobuf.AbstractParser<KeywordPlanForecastPeriod>() { @java.lang.Override public KeywordPlanForecastPeriod parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<KeywordPlanForecastPeriod> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<KeywordPlanForecastPeriod> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.resources.KeywordPlanForecastPeriod getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): The following three values are dataset-concatenation artifacts (repository
// id, file size, and path of the next generated file) — they are not Java source. They are
// preserved here as comments so the stream stays parseable; they mark the boundary between
// the v20 KeywordPlanForecastPeriod file above and the v21 KeywordPlanForecastPeriod file below.
// repo: googleads/google-ads-java
// size: 35,309
// path: google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/resources/KeywordPlanForecastPeriod.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/resources/keyword_plan.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.resources; /** * <pre> * The forecasting period associated with the keyword plan. * </pre> * * Protobuf type {@code google.ads.googleads.v21.resources.KeywordPlanForecastPeriod} */ public final class KeywordPlanForecastPeriod extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.resources.KeywordPlanForecastPeriod) KeywordPlanForecastPeriodOrBuilder { private static final long serialVersionUID = 0L; // Use KeywordPlanForecastPeriod.newBuilder() to construct. private KeywordPlanForecastPeriod(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private KeywordPlanForecastPeriod() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new KeywordPlanForecastPeriod(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.resources.KeywordPlanProto.internal_static_google_ads_googleads_v21_resources_KeywordPlanForecastPeriod_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.resources.KeywordPlanProto.internal_static_google_ads_googleads_v21_resources_KeywordPlanForecastPeriod_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod.class, com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod.Builder.class); } private int intervalCase_ = 0; @SuppressWarnings("serial") private java.lang.Object interval_; public enum IntervalCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { DATE_INTERVAL(1), 
DATE_RANGE(2), INTERVAL_NOT_SET(0); private final int value; private IntervalCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static IntervalCase valueOf(int value) { return forNumber(value); } public static IntervalCase forNumber(int value) { switch (value) { case 1: return DATE_INTERVAL; case 2: return DATE_RANGE; case 0: return INTERVAL_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public IntervalCase getIntervalCase() { return IntervalCase.forNumber( intervalCase_); } public static final int DATE_INTERVAL_FIELD_NUMBER = 1; /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return Whether the dateInterval field is set. */ public boolean hasDateInterval() { return intervalCase_ == 1; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The enum numeric value on the wire for dateInterval. */ public int getDateIntervalValue() { if (intervalCase_ == 1) { return (java.lang.Integer) interval_; } return 0; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The dateInterval. 
*/ public com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval getDateInterval() { if (intervalCase_ == 1) { com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval result = com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.forNumber( (java.lang.Integer) interval_); return result == null ? com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNRECOGNIZED : result; } return com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNSPECIFIED; } public static final int DATE_RANGE_FIELD_NUMBER = 2; /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> * @return Whether the dateRange field is set. */ @java.lang.Override public boolean hasDateRange() { return intervalCase_ == 2; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> * @return The dateRange. */ @java.lang.Override public com.google.ads.googleads.v21.common.DateRange getDateRange() { if (intervalCase_ == 2) { return (com.google.ads.googleads.v21.common.DateRange) interval_; } return com.google.ads.googleads.v21.common.DateRange.getDefaultInstance(); } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. 
Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.DateRangeOrBuilder getDateRangeOrBuilder() { if (intervalCase_ == 2) { return (com.google.ads.googleads.v21.common.DateRange) interval_; } return com.google.ads.googleads.v21.common.DateRange.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (intervalCase_ == 1) { output.writeEnum(1, ((java.lang.Integer) interval_)); } if (intervalCase_ == 2) { output.writeMessage(2, (com.google.ads.googleads.v21.common.DateRange) interval_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (intervalCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, ((java.lang.Integer) interval_)); } if (intervalCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.ads.googleads.v21.common.DateRange) interval_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod)) { return super.equals(obj); } com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod other = (com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod) obj; if 
(!getIntervalCase().equals(other.getIntervalCase())) return false; switch (intervalCase_) { case 1: if (getDateIntervalValue() != other.getDateIntervalValue()) return false; break; case 2: if (!getDateRange() .equals(other.getDateRange())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (intervalCase_) { case 1: hash = (37 * hash) + DATE_INTERVAL_FIELD_NUMBER; hash = (53 * hash) + getDateIntervalValue(); break; case 2: hash = (37 * hash) + DATE_RANGE_FIELD_NUMBER; hash = (53 * hash) + getDateRange().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom(byte[] 
data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The forecasting period associated with the keyword plan. * </pre> * * Protobuf type {@code google.ads.googleads.v21.resources.KeywordPlanForecastPeriod} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.resources.KeywordPlanForecastPeriod) com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriodOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.resources.KeywordPlanProto.internal_static_google_ads_googleads_v21_resources_KeywordPlanForecastPeriod_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.resources.KeywordPlanProto.internal_static_google_ads_googleads_v21_resources_KeywordPlanForecastPeriod_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod.class, com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod.Builder.class); } // Construct using com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod.newBuilder() 
private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (dateRangeBuilder_ != null) { dateRangeBuilder_.clear(); } intervalCase_ = 0; interval_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.resources.KeywordPlanProto.internal_static_google_ads_googleads_v21_resources_KeywordPlanForecastPeriod_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod getDefaultInstanceForType() { return com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod build() { com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod buildPartial() { com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod result = new com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod result) { result.intervalCase_ = intervalCase_; result.interval_ = this.interval_; if (intervalCase_ == 2 && dateRangeBuilder_ != null) { result.interval_ = dateRangeBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod) { return mergeFrom((com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod other) { if (other == com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod.getDefaultInstance()) return this; switch (other.getIntervalCase()) { case DATE_INTERVAL: { setDateIntervalValue(other.getDateIntervalValue()); break; } case DATE_RANGE: { mergeDateRange(other.getDateRange()); break; } case INTERVAL_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = 
input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); intervalCase_ = 1; interval_ = rawValue; break; } // case 8 case 18: { input.readMessage( getDateRangeFieldBuilder().getBuilder(), extensionRegistry); intervalCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int intervalCase_ = 0; private java.lang.Object interval_; public IntervalCase getIntervalCase() { return IntervalCase.forNumber( intervalCase_); } public Builder clearInterval() { intervalCase_ = 0; interval_ = null; onChanged(); return this; } private int bitField0_; /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return Whether the dateInterval field is set. */ @java.lang.Override public boolean hasDateInterval() { return intervalCase_ == 1; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The enum numeric value on the wire for dateInterval. */ @java.lang.Override public int getDateIntervalValue() { if (intervalCase_ == 1) { return ((java.lang.Integer) interval_).intValue(); } return 0; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @param value The enum numeric value on the wire for dateInterval to set. 
* @return This builder for chaining. */ public Builder setDateIntervalValue(int value) { intervalCase_ = 1; interval_ = value; onChanged(); return this; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return The dateInterval. */ @java.lang.Override public com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval getDateInterval() { if (intervalCase_ == 1) { com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval result = com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.forNumber( (java.lang.Integer) interval_); return result == null ? com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNRECOGNIZED : result; } return com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval.UNSPECIFIED; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @param value The dateInterval to set. * @return This builder for chaining. */ public Builder setDateInterval(com.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval value) { if (value == null) { throw new NullPointerException(); } intervalCase_ = 1; interval_ = value.getNumber(); onChanged(); return this; } /** * <pre> * A future date range relative to the current date used for forecasting. * </pre> * * <code>.google.ads.googleads.v21.enums.KeywordPlanForecastIntervalEnum.KeywordPlanForecastInterval date_interval = 1;</code> * @return This builder for chaining. 
*/ public Builder clearDateInterval() { if (intervalCase_ == 1) { intervalCase_ = 0; interval_ = null; onChanged(); } return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.DateRange, com.google.ads.googleads.v21.common.DateRange.Builder, com.google.ads.googleads.v21.common.DateRangeOrBuilder> dateRangeBuilder_; /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> * @return Whether the dateRange field is set. */ @java.lang.Override public boolean hasDateRange() { return intervalCase_ == 2; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> * @return The dateRange. */ @java.lang.Override public com.google.ads.googleads.v21.common.DateRange getDateRange() { if (dateRangeBuilder_ == null) { if (intervalCase_ == 2) { return (com.google.ads.googleads.v21.common.DateRange) interval_; } return com.google.ads.googleads.v21.common.DateRange.getDefaultInstance(); } else { if (intervalCase_ == 2) { return dateRangeBuilder_.getMessage(); } return com.google.ads.googleads.v21.common.DateRange.getDefaultInstance(); } } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. 
* </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ public Builder setDateRange(com.google.ads.googleads.v21.common.DateRange value) { if (dateRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } interval_ = value; onChanged(); } else { dateRangeBuilder_.setMessage(value); } intervalCase_ = 2; return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ public Builder setDateRange( com.google.ads.googleads.v21.common.DateRange.Builder builderForValue) { if (dateRangeBuilder_ == null) { interval_ = builderForValue.build(); onChanged(); } else { dateRangeBuilder_.setMessage(builderForValue.build()); } intervalCase_ = 2; return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. 
* </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ public Builder mergeDateRange(com.google.ads.googleads.v21.common.DateRange value) { if (dateRangeBuilder_ == null) { if (intervalCase_ == 2 && interval_ != com.google.ads.googleads.v21.common.DateRange.getDefaultInstance()) { interval_ = com.google.ads.googleads.v21.common.DateRange.newBuilder((com.google.ads.googleads.v21.common.DateRange) interval_) .mergeFrom(value).buildPartial(); } else { interval_ = value; } onChanged(); } else { if (intervalCase_ == 2) { dateRangeBuilder_.mergeFrom(value); } else { dateRangeBuilder_.setMessage(value); } } intervalCase_ = 2; return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ public Builder clearDateRange() { if (dateRangeBuilder_ == null) { if (intervalCase_ == 2) { intervalCase_ = 0; interval_ = null; onChanged(); } } else { if (intervalCase_ == 2) { intervalCase_ = 0; interval_ = null; } dateRangeBuilder_.clear(); } return this; } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ public com.google.ads.googleads.v21.common.DateRange.Builder getDateRangeBuilder() { return getDateRangeFieldBuilder().getBuilder(); } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. 
Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.DateRangeOrBuilder getDateRangeOrBuilder() { if ((intervalCase_ == 2) && (dateRangeBuilder_ != null)) { return dateRangeBuilder_.getMessageOrBuilder(); } else { if (intervalCase_ == 2) { return (com.google.ads.googleads.v21.common.DateRange) interval_; } return com.google.ads.googleads.v21.common.DateRange.getDefaultInstance(); } } /** * <pre> * The custom date range used for forecasting. It cannot be greater than * a year. * The start and end dates must be in the future. Otherwise, an error will * be returned when the forecasting action is performed. * The start and end dates are inclusive. * </pre> * * <code>.google.ads.googleads.v21.common.DateRange date_range = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.DateRange, com.google.ads.googleads.v21.common.DateRange.Builder, com.google.ads.googleads.v21.common.DateRangeOrBuilder> getDateRangeFieldBuilder() { if (dateRangeBuilder_ == null) { if (!(intervalCase_ == 2)) { interval_ = com.google.ads.googleads.v21.common.DateRange.getDefaultInstance(); } dateRangeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.DateRange, com.google.ads.googleads.v21.common.DateRange.Builder, com.google.ads.googleads.v21.common.DateRangeOrBuilder>( (com.google.ads.googleads.v21.common.DateRange) interval_, getParentForChildren(), isClean()); interval_ = null; } intervalCase_ = 2; onChanged(); return dateRangeBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.resources.KeywordPlanForecastPeriod) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.resources.KeywordPlanForecastPeriod) private static final com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod(); } public static com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<KeywordPlanForecastPeriod> PARSER = new com.google.protobuf.AbstractParser<KeywordPlanForecastPeriod>() { @java.lang.Override public KeywordPlanForecastPeriod parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<KeywordPlanForecastPeriod> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<KeywordPlanForecastPeriod> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.resources.KeywordPlanForecastPeriod getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,425
java-secretmanager/google-cloud-secretmanager/src/main/java/com/google/cloud/secretmanager/v1beta1/stub/GrpcSecretManagerServiceStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.secretmanager.v1beta1.stub; import static com.google.cloud.secretmanager.v1beta1.SecretManagerServiceClient.ListLocationsPagedResponse; import static com.google.cloud.secretmanager.v1beta1.SecretManagerServiceClient.ListSecretVersionsPagedResponse; import static com.google.cloud.secretmanager.v1beta1.SecretManagerServiceClient.ListSecretsPagedResponse; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.cloud.secretmanager.v1beta1.AccessSecretVersionRequest; import com.google.cloud.secretmanager.v1beta1.AccessSecretVersionResponse; import com.google.cloud.secretmanager.v1beta1.AddSecretVersionRequest; import com.google.cloud.secretmanager.v1beta1.CreateSecretRequest; import com.google.cloud.secretmanager.v1beta1.DeleteSecretRequest; import com.google.cloud.secretmanager.v1beta1.DestroySecretVersionRequest; import 
com.google.cloud.secretmanager.v1beta1.DisableSecretVersionRequest; import com.google.cloud.secretmanager.v1beta1.EnableSecretVersionRequest; import com.google.cloud.secretmanager.v1beta1.GetSecretRequest; import com.google.cloud.secretmanager.v1beta1.GetSecretVersionRequest; import com.google.cloud.secretmanager.v1beta1.ListSecretVersionsRequest; import com.google.cloud.secretmanager.v1beta1.ListSecretVersionsResponse; import com.google.cloud.secretmanager.v1beta1.ListSecretsRequest; import com.google.cloud.secretmanager.v1beta1.ListSecretsResponse; import com.google.cloud.secretmanager.v1beta1.Secret; import com.google.cloud.secretmanager.v1beta1.SecretVersion; import com.google.cloud.secretmanager.v1beta1.UpdateSecretRequest; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * gRPC stub implementation for the SecretManagerService service API. * * <p>This class is for advanced usage and reflects the underlying API directly. 
*/ @BetaApi @Generated("by gapic-generator-java") public class GrpcSecretManagerServiceStub extends SecretManagerServiceStub { private static final MethodDescriptor<ListSecretsRequest, ListSecretsResponse> listSecretsMethodDescriptor = MethodDescriptor.<ListSecretsRequest, ListSecretsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.secrets.v1beta1.SecretManagerService/ListSecrets") .setRequestMarshaller(ProtoUtils.marshaller(ListSecretsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(ListSecretsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<CreateSecretRequest, Secret> createSecretMethodDescriptor = MethodDescriptor.<CreateSecretRequest, Secret>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.secrets.v1beta1.SecretManagerService/CreateSecret") .setRequestMarshaller(ProtoUtils.marshaller(CreateSecretRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Secret.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<AddSecretVersionRequest, SecretVersion> addSecretVersionMethodDescriptor = MethodDescriptor.<AddSecretVersionRequest, SecretVersion>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/AddSecretVersion") .setRequestMarshaller( ProtoUtils.marshaller(AddSecretVersionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetSecretRequest, Secret> getSecretMethodDescriptor = MethodDescriptor.<GetSecretRequest, Secret>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.secrets.v1beta1.SecretManagerService/GetSecret") 
.setRequestMarshaller(ProtoUtils.marshaller(GetSecretRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Secret.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<UpdateSecretRequest, Secret> updateSecretMethodDescriptor = MethodDescriptor.<UpdateSecretRequest, Secret>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.secrets.v1beta1.SecretManagerService/UpdateSecret") .setRequestMarshaller(ProtoUtils.marshaller(UpdateSecretRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Secret.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<DeleteSecretRequest, Empty> deleteSecretMethodDescriptor = MethodDescriptor.<DeleteSecretRequest, Empty>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.secrets.v1beta1.SecretManagerService/DeleteSecret") .setRequestMarshaller(ProtoUtils.marshaller(DeleteSecretRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListSecretVersionsRequest, ListSecretVersionsResponse> listSecretVersionsMethodDescriptor = MethodDescriptor.<ListSecretVersionsRequest, ListSecretVersionsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/ListSecretVersions") .setRequestMarshaller( ProtoUtils.marshaller(ListSecretVersionsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(ListSecretVersionsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetSecretVersionRequest, SecretVersion> getSecretVersionMethodDescriptor = MethodDescriptor.<GetSecretVersionRequest, SecretVersion>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) 
.setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/GetSecretVersion") .setRequestMarshaller( ProtoUtils.marshaller(GetSecretVersionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<AccessSecretVersionRequest, AccessSecretVersionResponse> accessSecretVersionMethodDescriptor = MethodDescriptor.<AccessSecretVersionRequest, AccessSecretVersionResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/AccessSecretVersion") .setRequestMarshaller( ProtoUtils.marshaller(AccessSecretVersionRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(AccessSecretVersionResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<DisableSecretVersionRequest, SecretVersion> disableSecretVersionMethodDescriptor = MethodDescriptor.<DisableSecretVersionRequest, SecretVersion>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/DisableSecretVersion") .setRequestMarshaller( ProtoUtils.marshaller(DisableSecretVersionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<EnableSecretVersionRequest, SecretVersion> enableSecretVersionMethodDescriptor = MethodDescriptor.<EnableSecretVersionRequest, SecretVersion>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/EnableSecretVersion") .setRequestMarshaller( ProtoUtils.marshaller(EnableSecretVersionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); 
private static final MethodDescriptor<DestroySecretVersionRequest, SecretVersion> destroySecretVersionMethodDescriptor = MethodDescriptor.<DestroySecretVersionRequest, SecretVersion>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/DestroySecretVersion") .setRequestMarshaller( ProtoUtils.marshaller(DestroySecretVersionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor = MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.secrets.v1beta1.SecretManagerService/SetIamPolicy") .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor = MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.secrets.v1beta1.SecretManagerService/GetIamPolicy") .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsMethodDescriptor = MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.secrets.v1beta1.SecretManagerService/TestIamPermissions") .setRequestMarshaller( ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance())) 
.setResponseMarshaller( ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse> listLocationsMethodDescriptor = MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.location.Locations/ListLocations") .setRequestMarshaller( ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor = MethodDescriptor.<GetLocationRequest, Location>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.location.Locations/GetLocation") .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private final UnaryCallable<ListSecretsRequest, ListSecretsResponse> listSecretsCallable; private final UnaryCallable<ListSecretsRequest, ListSecretsPagedResponse> listSecretsPagedCallable; private final UnaryCallable<CreateSecretRequest, Secret> createSecretCallable; private final UnaryCallable<AddSecretVersionRequest, SecretVersion> addSecretVersionCallable; private final UnaryCallable<GetSecretRequest, Secret> getSecretCallable; private final UnaryCallable<UpdateSecretRequest, Secret> updateSecretCallable; private final UnaryCallable<DeleteSecretRequest, Empty> deleteSecretCallable; private final UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsResponse> listSecretVersionsCallable; private final UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsPagedResponse> listSecretVersionsPagedCallable; private final 
UnaryCallable<GetSecretVersionRequest, SecretVersion> getSecretVersionCallable; private final UnaryCallable<AccessSecretVersionRequest, AccessSecretVersionResponse> accessSecretVersionCallable; private final UnaryCallable<DisableSecretVersionRequest, SecretVersion> disableSecretVersionCallable; private final UnaryCallable<EnableSecretVersionRequest, SecretVersion> enableSecretVersionCallable; private final UnaryCallable<DestroySecretVersionRequest, SecretVersion> destroySecretVersionCallable; private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable; private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable; private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable; private final UnaryCallable<GetLocationRequest, Location> getLocationCallable; private final BackgroundResource backgroundResources; private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcSecretManagerServiceStub create(SecretManagerServiceStubSettings settings) throws IOException { return new GrpcSecretManagerServiceStub(settings, ClientContext.create(settings)); } public static final GrpcSecretManagerServiceStub create(ClientContext clientContext) throws IOException { return new GrpcSecretManagerServiceStub( SecretManagerServiceStubSettings.newBuilder().build(), clientContext); } public static final GrpcSecretManagerServiceStub create( ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { return new GrpcSecretManagerServiceStub( SecretManagerServiceStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of GrpcSecretManagerServiceStub, using the given settings. 
This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected GrpcSecretManagerServiceStub( SecretManagerServiceStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcSecretManagerServiceCallableFactory()); } /** * Constructs an instance of GrpcSecretManagerServiceStub, using the given settings. This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected GrpcSecretManagerServiceStub( SecretManagerServiceStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings<ListSecretsRequest, ListSecretsResponse> listSecretsTransportSettings = GrpcCallSettings.<ListSecretsRequest, ListSecretsResponse>newBuilder() .setMethodDescriptor(listSecretsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<CreateSecretRequest, Secret> createSecretTransportSettings = GrpcCallSettings.<CreateSecretRequest, Secret>newBuilder() .setMethodDescriptor(createSecretMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<AddSecretVersionRequest, SecretVersion> addSecretVersionTransportSettings = GrpcCallSettings.<AddSecretVersionRequest, SecretVersion>newBuilder() .setMethodDescriptor(addSecretVersionMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", 
String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<GetSecretRequest, Secret> getSecretTransportSettings = GrpcCallSettings.<GetSecretRequest, Secret>newBuilder() .setMethodDescriptor(getSecretMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<UpdateSecretRequest, Secret> updateSecretTransportSettings = GrpcCallSettings.<UpdateSecretRequest, Secret>newBuilder() .setMethodDescriptor(updateSecretMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("secret.name", String.valueOf(request.getSecret().getName())); return builder.build(); }) .build(); GrpcCallSettings<DeleteSecretRequest, Empty> deleteSecretTransportSettings = GrpcCallSettings.<DeleteSecretRequest, Empty>newBuilder() .setMethodDescriptor(deleteSecretMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<ListSecretVersionsRequest, ListSecretVersionsResponse> listSecretVersionsTransportSettings = GrpcCallSettings.<ListSecretVersionsRequest, ListSecretVersionsResponse>newBuilder() .setMethodDescriptor(listSecretVersionsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<GetSecretVersionRequest, SecretVersion> getSecretVersionTransportSettings = GrpcCallSettings.<GetSecretVersionRequest, SecretVersion>newBuilder() .setMethodDescriptor(getSecretVersionMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", 
String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<AccessSecretVersionRequest, AccessSecretVersionResponse> accessSecretVersionTransportSettings = GrpcCallSettings.<AccessSecretVersionRequest, AccessSecretVersionResponse>newBuilder() .setMethodDescriptor(accessSecretVersionMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<DisableSecretVersionRequest, SecretVersion> disableSecretVersionTransportSettings = GrpcCallSettings.<DisableSecretVersionRequest, SecretVersion>newBuilder() .setMethodDescriptor(disableSecretVersionMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<EnableSecretVersionRequest, SecretVersion> enableSecretVersionTransportSettings = GrpcCallSettings.<EnableSecretVersionRequest, SecretVersion>newBuilder() .setMethodDescriptor(enableSecretVersionMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<DestroySecretVersionRequest, SecretVersion> destroySecretVersionTransportSettings = GrpcCallSettings.<DestroySecretVersionRequest, SecretVersion>newBuilder() .setMethodDescriptor(destroySecretVersionMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings = GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder() .setMethodDescriptor(setIamPolicyMethodDescriptor) .setParamsExtractor( 
request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings = GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder() .setMethodDescriptor(getIamPolicyMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsTransportSettings = GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder() .setMethodDescriptor(testIamPermissionsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings = GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setMethodDescriptor(listLocationsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings = GrpcCallSettings.<GetLocationRequest, Location>newBuilder() .setMethodDescriptor(getLocationMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); this.listSecretsCallable = callableFactory.createUnaryCallable( listSecretsTransportSettings, settings.listSecretsSettings(), clientContext); this.listSecretsPagedCallable = 
callableFactory.createPagedCallable( listSecretsTransportSettings, settings.listSecretsSettings(), clientContext); this.createSecretCallable = callableFactory.createUnaryCallable( createSecretTransportSettings, settings.createSecretSettings(), clientContext); this.addSecretVersionCallable = callableFactory.createUnaryCallable( addSecretVersionTransportSettings, settings.addSecretVersionSettings(), clientContext); this.getSecretCallable = callableFactory.createUnaryCallable( getSecretTransportSettings, settings.getSecretSettings(), clientContext); this.updateSecretCallable = callableFactory.createUnaryCallable( updateSecretTransportSettings, settings.updateSecretSettings(), clientContext); this.deleteSecretCallable = callableFactory.createUnaryCallable( deleteSecretTransportSettings, settings.deleteSecretSettings(), clientContext); this.listSecretVersionsCallable = callableFactory.createUnaryCallable( listSecretVersionsTransportSettings, settings.listSecretVersionsSettings(), clientContext); this.listSecretVersionsPagedCallable = callableFactory.createPagedCallable( listSecretVersionsTransportSettings, settings.listSecretVersionsSettings(), clientContext); this.getSecretVersionCallable = callableFactory.createUnaryCallable( getSecretVersionTransportSettings, settings.getSecretVersionSettings(), clientContext); this.accessSecretVersionCallable = callableFactory.createUnaryCallable( accessSecretVersionTransportSettings, settings.accessSecretVersionSettings(), clientContext); this.disableSecretVersionCallable = callableFactory.createUnaryCallable( disableSecretVersionTransportSettings, settings.disableSecretVersionSettings(), clientContext); this.enableSecretVersionCallable = callableFactory.createUnaryCallable( enableSecretVersionTransportSettings, settings.enableSecretVersionSettings(), clientContext); this.destroySecretVersionCallable = callableFactory.createUnaryCallable( destroySecretVersionTransportSettings, settings.destroySecretVersionSettings(), 
clientContext); this.setIamPolicyCallable = callableFactory.createUnaryCallable( setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext); this.getIamPolicyCallable = callableFactory.createUnaryCallable( getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext); this.testIamPermissionsCallable = callableFactory.createUnaryCallable( testIamPermissionsTransportSettings, settings.testIamPermissionsSettings(), clientContext); this.listLocationsCallable = callableFactory.createUnaryCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.listLocationsPagedCallable = callableFactory.createPagedCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.getLocationCallable = callableFactory.createUnaryCallable( getLocationTransportSettings, settings.getLocationSettings(), clientContext); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } public GrpcOperationsStub getOperationsStub() { return operationsStub; } @Override public UnaryCallable<ListSecretsRequest, ListSecretsResponse> listSecretsCallable() { return listSecretsCallable; } @Override public UnaryCallable<ListSecretsRequest, ListSecretsPagedResponse> listSecretsPagedCallable() { return listSecretsPagedCallable; } @Override public UnaryCallable<CreateSecretRequest, Secret> createSecretCallable() { return createSecretCallable; } @Override public UnaryCallable<AddSecretVersionRequest, SecretVersion> addSecretVersionCallable() { return addSecretVersionCallable; } @Override public UnaryCallable<GetSecretRequest, Secret> getSecretCallable() { return getSecretCallable; } @Override public UnaryCallable<UpdateSecretRequest, Secret> updateSecretCallable() { return updateSecretCallable; } @Override public UnaryCallable<DeleteSecretRequest, Empty> deleteSecretCallable() { return deleteSecretCallable; } @Override public 
UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsResponse> listSecretVersionsCallable() { return listSecretVersionsCallable; } @Override public UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsPagedResponse> listSecretVersionsPagedCallable() { return listSecretVersionsPagedCallable; } @Override public UnaryCallable<GetSecretVersionRequest, SecretVersion> getSecretVersionCallable() { return getSecretVersionCallable; } @Override public UnaryCallable<AccessSecretVersionRequest, AccessSecretVersionResponse> accessSecretVersionCallable() { return accessSecretVersionCallable; } @Override public UnaryCallable<DisableSecretVersionRequest, SecretVersion> disableSecretVersionCallable() { return disableSecretVersionCallable; } @Override public UnaryCallable<EnableSecretVersionRequest, SecretVersion> enableSecretVersionCallable() { return enableSecretVersionCallable; } @Override public UnaryCallable<DestroySecretVersionRequest, SecretVersion> destroySecretVersionCallable() { return destroySecretVersionCallable; } @Override public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() { return setIamPolicyCallable; } @Override public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() { return getIamPolicyCallable; } @Override public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable() { return testIamPermissionsCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() { return listLocationsCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable() { return listLocationsPagedCallable; } @Override public UnaryCallable<GetLocationRequest, Location> getLocationCallable() { return getLocationCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed 
to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
apache/flink-kubernetes-operator
35,300
flink-kubernetes-operator/src/test/java/org/apache/flink/kubernetes/operator/observer/JobStatusObserverTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.kubernetes.operator.observer; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.JobStatus; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.PipelineOptionsInternal; import org.apache.flink.kubernetes.operator.OperatorTestBase; import org.apache.flink.kubernetes.operator.TestUtils; import org.apache.flink.kubernetes.operator.api.AbstractFlinkResource; import org.apache.flink.kubernetes.operator.api.FlinkDeployment; import org.apache.flink.kubernetes.operator.api.FlinkSessionJob; import org.apache.flink.kubernetes.operator.api.spec.JobState; import org.apache.flink.kubernetes.operator.api.spec.UpgradeMode; import org.apache.flink.kubernetes.operator.config.KubernetesOperatorConfigOptions; import org.apache.flink.kubernetes.operator.controller.FlinkResourceContext; import org.apache.flink.kubernetes.operator.reconciler.ReconciliationUtils; import org.apache.flink.kubernetes.operator.utils.EventRecorder; import org.apache.flink.util.SerializedThrowable; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.server.mock.EnableKubernetesMockClient; import lombok.Getter; import 
org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.MethodSource; import java.time.Duration; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.stream.Stream; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; /** Tests for the {@link JobStatusObserver}. */ @EnableKubernetesMockClient(crud = true) public class JobStatusObserverTest extends OperatorTestBase { @Getter private KubernetesClient kubernetesClient; private JobStatusObserver<AbstractFlinkResource<?, ?>> observer; @Override protected void setup() { observer = new JobStatusObserver<>(eventRecorder); } @ParameterizedTest @MethodSource("cancellingArgs") void testCancellingToMissing( JobStatus fromStatus, UpgradeMode upgradeMode, JobState expectedAfter) { var job = initSessionJob(); job.getSpec().getJob().setUpgradeMode(upgradeMode); var status = job.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(fromStatus); assertEquals( JobState.RUNNING, status.getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getState()); observer.observe( getResourceContext( job, TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient))); assertEquals( JobStatusObserver.JOB_NOT_FOUND_ERR, flinkResourceEventCollector.events.poll().getMessage()); assertEquals( expectedAfter, status.getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getState()); } @ParameterizedTest @EnumSource(value = JobStatus.class, mode = EnumSource.Mode.EXCLUDE, names = "CANCELED") void testCancellingToTerminal(JobStatus fromStatus) throws Exception { var observer = new 
JobStatusObserver<>(eventRecorder); var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(fromStatus); assertEquals( JobState.RUNNING, status.getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getState()); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment); flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); flinkService.cancelJob(JobID.fromHexString(jobStatus.getJobId()), false); observer.observe(ctx); assertEquals( EventRecorder.Reason.JobStatusChanged.name(), flinkResourceEventCollector.events.poll().getReason()); assertEquals( JobState.SUSPENDED, status.getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getState()); } @Test void testFailed() throws Exception { var observer = new JobStatusObserver<>(eventRecorder); var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment); flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); // Mark failed flinkService.setJobFailedErr( new Exception("job err", new SerializedThrowable(new Exception("root")))); observer.observe(ctx); // First event should be job error reported var jobErrorEvent = flinkResourceEventCollector.events.poll(); assertEquals(EventRecorder.Reason.Error.name(), jobErrorEvent.getReason()); assertEquals("job err -> root", jobErrorEvent.getMessage()); // Make sure job status still reported assertEquals( EventRecorder.Reason.JobStatusChanged.name(), flinkResourceEventCollector.events.poll().getReason()); observer.observe(ctx); assertTrue(flinkResourceEventCollector.events.isEmpty()); } @Test public void testExceptionObservedEvenWhenNewStateIsTerminal() 
throws Exception { var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); Map<String, String> configuration = new HashMap<>(); configuration.put( KubernetesOperatorConfigOptions.OPERATOR_EVENT_EXCEPTION_LIMIT.key(), "2"); Configuration operatorConfig = Configuration.fromMap(configuration); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment, operatorConfig); var jobId = JobID.fromHexString(deployment.getStatus().getJobStatus().getJobId()); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(500L)); flinkService.addExceptionHistory(jobId, "ExceptionOne", "trace1", 1000L); // Ensure jobFailedErr is null before the observe call flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); flinkService.cancelJob(JobID.fromHexString(jobStatus.getJobId()), false); flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(deployment.getMetadata().getNamespace()) .list() .getItems(); assertEquals(2, events.size()); // one will be for job status changed // assert that none of the events contain JOB_NOT_FOUND_ERR assertFalse( events.stream() .anyMatch( event -> event.getMessage() .contains(JobStatusObserver.JOB_NOT_FOUND_ERR))); } @Test public void testExceptionNotObservedWhenOldStateIsTerminal() throws Exception { var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.CANCELED); Map<String, String> configuration = new HashMap<>(); configuration.put( KubernetesOperatorConfigOptions.OPERATOR_EVENT_EXCEPTION_LIMIT.key(), "2"); Configuration operatorConfig = Configuration.fromMap(configuration); 
FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment, operatorConfig); var jobId = JobID.fromHexString(deployment.getStatus().getJobStatus().getJobId()); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(500L)); flinkService.addExceptionHistory(jobId, "ExceptionOne", "trace1", 1000L); // Ensure jobFailedErr is null before the observe call flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(deployment.getMetadata().getNamespace()) .list() .getItems(); assertEquals(1, events.size()); // only one event for job status changed assertEquals(EventRecorder.Reason.JobStatusChanged.name(), events.get(0).getReason()); } @Test public void testExceptionLimitConfig() throws Exception { var observer = new JobStatusObserver<>(eventRecorder); var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); Map<String, String> configuration = new HashMap<>(); configuration.put( KubernetesOperatorConfigOptions.OPERATOR_EVENT_EXCEPTION_LIMIT.key(), "2"); Configuration operatorConfig = Configuration.fromMap(configuration); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment, operatorConfig); // set a non-terminal state var jobId = JobID.fromHexString(deployment.getStatus().getJobStatus().getJobId()); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(500L)); flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); 
flinkService.addExceptionHistory(jobId, "ExceptionOne", "trace1", 1000L); flinkService.addExceptionHistory(jobId, "ExceptionTwo", "trace2", 2000L); flinkService.addExceptionHistory(jobId, "ExceptionThree", "trace3", 3000L); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(deployment.getMetadata().getNamespace()) .list() .getItems(); assertEquals(2, events.size()); } @Test public void testStackTraceTruncationConfig() throws Exception { var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); Map<String, String> configuration = new HashMap<>(); configuration.put( KubernetesOperatorConfigOptions.OPERATOR_EVENT_EXCEPTION_STACKTRACE_LINES.key(), "2"); Configuration operatorConfig = Configuration.fromMap(configuration); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment, operatorConfig); var jobId = JobID.fromHexString(deployment.getStatus().getJobStatus().getJobId()); flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); ReconciliationUtils.updateStatusForDeployedSpec(deployment, new Configuration()); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(3000L)); long exceptionTime = 4000L; String longTrace = "line1\nline2\nline3\nline4"; flinkService.addExceptionHistory(jobId, "StackTraceCheck", longTrace, exceptionTime); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(deployment.getMetadata().getNamespace()) .list() .getItems(); assertEquals(1, events.size()); String msg = events.get(0).getMessage(); 
assertTrue(msg.contains("line1")); assertTrue(msg.contains("line2")); assertFalse(msg.contains("line3")); assertTrue(msg.contains("... (2 more lines)")); } @Test public void testIgnoreOldExceptions() throws Exception { var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); // set a non-terminal state FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(deployment.getStatus().getJobStatus().getJobId()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(2500L)); var jobId = JobID.fromHexString(deployment.getStatus().getJobStatus().getJobId()); flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); // Map exception names to timestamps Map<String, Long> exceptionHistory = Map.of( "OldException", 1000L, "MidException", 2000L, "NewException", 3000L); String dummyStackTrace = "org.apache.%s\n" + "\tat org.apache.flink.kubernetes.operator.observer.JobStatusObserverTest.testIgnoreOldExceptions(JobStatusObserverTest.java:1)\n" + "\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n" + "\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n" + "\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n" + "\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\n"; // Add mapped exceptions exceptionHistory.forEach( (exceptionName, timestamp) -> { String fullStackTrace = String.format(dummyStackTrace, exceptionName); flinkService.addExceptionHistory( jobId, "org.apache." 
+ exceptionName, fullStackTrace, timestamp); }); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(deployment.getMetadata().getNamespace()) .list() .getItems(); assertEquals(1, events.size()); assertTrue(events.get(0).getMessage().contains("org.apache.NewException")); } @Test public void testExceptionEventTriggerInitialization() throws Exception { var deployment = initDeployment(); var status = deployment.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); // set a non-terminal state FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext(deployment); var now = Instant.now(); var jobId = JobID.fromHexString(deployment.getStatus().getJobStatus().getJobId()); flinkService.submitApplicationCluster( deployment.getSpec().getJob(), ctx.getDeployConfig(deployment.getSpec()), false); // Old exception that happened outside of kubernetes event retention should be ignored flinkService.addExceptionHistory( jobId, "OldException", "OldException", now.minus(Duration.ofHours(1)).toEpochMilli()); flinkService.addExceptionHistory( jobId, "NewException", "NewException", now.minus(Duration.ofMinutes(1)).toEpochMilli()); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(deployment.getMetadata().getNamespace()) .list() .getItems(); assertEquals(1, events.size()); assertTrue(events.get(0).getMessage().contains("NewException")); assertTrue(ctx.getExceptionCacheEntry().isInitialized()); assertEquals( now.minus(Duration.ofMinutes(1)).truncatedTo(ChronoUnit.MILLIS), ctx.getExceptionCacheEntry().getLastTimestamp()); } @Test public void testSessionJobExceptionObservedEvenWhenNewStateIsTerminal() throws Exception { var sessionJob = initSessionJob(); var status = sessionJob.getStatus(); var 
jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); Map<String, String> configuration = new HashMap<>(); configuration.put( KubernetesOperatorConfigOptions.OPERATOR_EVENT_EXCEPTION_LIMIT.key(), "2"); Configuration operatorConfig = Configuration.fromMap(configuration); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext( sessionJob, TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient), operatorConfig); var jobId = JobID.fromHexString(sessionJob.getStatus().getJobStatus().getJobId()); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(500L)); flinkService.addExceptionHistory(jobId, "SessionJobExceptionOne", "trace1", 1000L); // Submit the session job flinkService.submitJobToSessionCluster( sessionJob.getMetadata(), sessionJob.getSpec(), jobId, ctx.getDeployConfig(sessionJob.getSpec()), null); // Cancel the job to make it terminal flinkService.cancelJob(jobId, false); flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(sessionJob.getMetadata().getNamespace()) .list() .getItems(); assertEquals(2, events.size()); // one will be for job status changed // assert that none of the events contain JOB_NOT_FOUND_ERR assertFalse( events.stream() .anyMatch( event -> event.getMessage() .contains(JobStatusObserver.JOB_NOT_FOUND_ERR))); } @Test public void testSessionJobExceptionNotObservedWhenOldStateIsTerminal() throws Exception { var sessionJob = initSessionJob(); var status = sessionJob.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.FINISHED); // Set to terminal state FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext( sessionJob, TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient)); var jobId = 
JobID.fromHexString(sessionJob.getStatus().getJobStatus().getJobId()); flinkService.submitJobToSessionCluster( sessionJob.getMetadata(), sessionJob.getSpec(), jobId, ctx.getDeployConfig(sessionJob.getSpec()), null); ReconciliationUtils.updateStatusForDeployedSpec(sessionJob, new Configuration()); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(3000L)); long exceptionTime = 4000L; flinkService.addExceptionHistory(jobId, "SessionJobException", "trace", exceptionTime); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(sessionJob.getMetadata().getNamespace()) .list() .getItems(); assertEquals( 1, events.size()); // Only one event for job status changed, no exception events assertEquals(EventRecorder.Reason.JobStatusChanged.name(), events.get(0).getReason()); } @Test public void testSessionJobExceptionLimitConfig() throws Exception { var sessionJob = initSessionJob(); var status = sessionJob.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); Map<String, String> configuration = new HashMap<>(); configuration.put( KubernetesOperatorConfigOptions.OPERATOR_EVENT_EXCEPTION_LIMIT.key(), "2"); Configuration operatorConfig = Configuration.fromMap(configuration); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext( sessionJob, TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient), operatorConfig); var jobId = JobID.fromHexString(sessionJob.getStatus().getJobStatus().getJobId()); flinkService.submitJobToSessionCluster( sessionJob.getMetadata(), sessionJob.getSpec(), jobId, ctx.getDeployConfig(sessionJob.getSpec()), null); ReconciliationUtils.updateStatusForDeployedSpec(sessionJob, new Configuration()); ctx.getExceptionCacheEntry().setInitialized(true); 
ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(3000L)); // Add 3 exceptions, but only 2 should be reported due to limit flinkService.addExceptionHistory(jobId, "SessionJobException1", "trace1", 4000L); flinkService.addExceptionHistory(jobId, "SessionJobException2", "trace2", 5000L); flinkService.addExceptionHistory(jobId, "SessionJobException3", "trace3", 6000L); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(sessionJob.getMetadata().getNamespace()) .list() .getItems(); assertEquals(2, events.size()); // Only 2 exceptions should be reported } @Test public void testSessionJobStackTraceTruncationConfig() throws Exception { var sessionJob = initSessionJob(); var status = sessionJob.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); Map<String, String> configuration = new HashMap<>(); configuration.put( KubernetesOperatorConfigOptions.OPERATOR_EVENT_EXCEPTION_STACKTRACE_LINES.key(), "2"); Configuration operatorConfig = Configuration.fromMap(configuration); FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext( sessionJob, TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient), operatorConfig); var jobId = JobID.fromHexString(sessionJob.getStatus().getJobStatus().getJobId()); flinkService.submitJobToSessionCluster( sessionJob.getMetadata(), sessionJob.getSpec(), jobId, ctx.getDeployConfig(sessionJob.getSpec()), null); ReconciliationUtils.updateStatusForDeployedSpec(sessionJob, new Configuration()); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(jobId.toHexString()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(3000L)); long exceptionTime = 4000L; String longTrace = "line1\nline2\nline3\nline4"; flinkService.addExceptionHistory( jobId, 
"SessionJobStackTraceCheck", longTrace, exceptionTime); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(sessionJob.getMetadata().getNamespace()) .list() .getItems(); assertEquals(1, events.size()); String msg = events.get(0).getMessage(); assertTrue(msg.contains("line1")); assertTrue(msg.contains("line2")); assertFalse(msg.contains("line3")); assertTrue(msg.contains("... (2 more lines)")); } @Test public void testSessionJobIgnoreOldExceptions() throws Exception { var sessionJob = initSessionJob(); var status = sessionJob.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); // set a non-terminal state FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext( sessionJob, TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient)); ctx.getExceptionCacheEntry().setInitialized(true); ctx.getExceptionCacheEntry().setJobId(sessionJob.getStatus().getJobStatus().getJobId()); ctx.getExceptionCacheEntry().setLastTimestamp(Instant.ofEpochMilli(2500L)); var jobId = JobID.fromHexString(sessionJob.getStatus().getJobStatus().getJobId()); flinkService.submitJobToSessionCluster( sessionJob.getMetadata(), sessionJob.getSpec(), jobId, ctx.getDeployConfig(sessionJob.getSpec()), null); // Map exception names to timestamps Map<String, Long> exceptionHistory = Map.of( "OldSessionException", 1000L, "MidSessionException", 2000L, "NewSessionException", 3000L); String dummyStackTrace = "org.apache.%s\n" + "\tat org.apache.flink.kubernetes.operator.observer.JobStatusObserverTest.testSessionJobIgnoreOldExceptions(JobStatusObserverTest.java:1)\n" + "\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n" + "\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n" + "\tat 
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n" + "\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\n"; // Add mapped exceptions exceptionHistory.forEach( (exceptionName, timestamp) -> { String fullStackTrace = String.format(dummyStackTrace, exceptionName); flinkService.addExceptionHistory( jobId, "org.apache." + exceptionName, fullStackTrace, timestamp); }); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() .inNamespace(sessionJob.getMetadata().getNamespace()) .list() .getItems(); assertEquals(1, events.size()); assertTrue(events.get(0).getMessage().contains("org.apache.NewSessionException")); } @Test public void testSessionJobExceptionEventTriggerInitialization() throws Exception { var sessionJob = initSessionJob(); var status = sessionJob.getStatus(); var jobStatus = status.getJobStatus(); jobStatus.setState(JobStatus.RUNNING); // set a non-terminal state FlinkResourceContext<AbstractFlinkResource<?, ?>> ctx = getResourceContext( sessionJob, TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient)); var now = Instant.now(); var jobId = JobID.fromHexString(sessionJob.getStatus().getJobStatus().getJobId()); flinkService.submitJobToSessionCluster( sessionJob.getMetadata(), sessionJob.getSpec(), jobId, ctx.getDeployConfig(sessionJob.getSpec()), null); // Old exception that happened outside of kubernetes event retention should be ignored flinkService.addExceptionHistory( jobId, "OldSessionException", "OldSessionException", now.minus(Duration.ofHours(1)).toEpochMilli()); flinkService.addExceptionHistory( jobId, "NewSessionException", "NewSessionException", now.minus(Duration.ofMinutes(1)).toEpochMilli()); // Ensure jobFailedErr is null before the observe call flinkService.setJobFailedErr(null); observer.observe(ctx); var events = kubernetesClient .v1() .events() 
.inNamespace(sessionJob.getMetadata().getNamespace()) .list() .getItems(); assertEquals(1, events.size()); assertTrue(events.get(0).getMessage().contains("NewSessionException")); assertTrue(ctx.getExceptionCacheEntry().isInitialized()); assertEquals( now.minus(Duration.ofMinutes(1)).truncatedTo(ChronoUnit.MILLIS), ctx.getExceptionCacheEntry().getLastTimestamp()); } private static Stream<Arguments> cancellingArgs() { var args = new ArrayList<Arguments>(); for (var status : JobStatus.values()) { for (var upgradeMode : UpgradeMode.values()) { args.add( Arguments.of( status, upgradeMode, upgradeMode == UpgradeMode.STATELESS && !status.isGloballyTerminalState() ? JobState.SUSPENDED : JobState.RUNNING)); } } return args.stream(); } private static FlinkDeployment initDeployment() { FlinkDeployment deployment = TestUtils.buildApplicationCluster(); var jobId = new JobID().toHexString(); deployment .getSpec() .getFlinkConfiguration() .put(PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID.key(), jobId); deployment.getStatus().getJobStatus().setJobId(jobId); deployment .getStatus() .getReconciliationStatus() .serializeAndSetLastReconciledSpec(deployment.getSpec(), deployment); return deployment; } private static FlinkSessionJob initSessionJob() { var job = TestUtils.buildSessionJob(); var jobId = new JobID().toHexString(); job.getSpec() .getFlinkConfiguration() .put(PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID.key(), jobId); job.getStatus().getJobStatus().setJobId(jobId); job.getStatus() .getReconciliationStatus() .serializeAndSetLastReconciledSpec(job.getSpec(), job); return job; } }
googleapis/google-cloud-java
35,169
java-vision/proto-google-cloud-vision-v1p3beta1/src/main/java/com/google/cloud/vision/v1p3beta1/UpdateProductSetRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p3beta1/product_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.vision.v1p3beta1; /** * * * <pre> * Request message for the `UpdateProductSet` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p3beta1.UpdateProductSetRequest} */ public final class UpdateProductSetRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p3beta1.UpdateProductSetRequest) UpdateProductSetRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateProductSetRequest.newBuilder() to construct. 
private UpdateProductSetRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateProductSetRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateProductSetRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductSetRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductSetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest.class, com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest.Builder.class); } private int bitField0_; public static final int PRODUCT_SET_FIELD_NUMBER = 1; private com.google.cloud.vision.v1p3beta1.ProductSet productSet_; /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the productSet field is set. */ @java.lang.Override public boolean hasProductSet() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The productSet. */ @java.lang.Override public com.google.cloud.vision.v1p3beta1.ProductSet getProductSet() { return productSet_ == null ? com.google.cloud.vision.v1p3beta1.ProductSet.getDefaultInstance() : productSet_; } /** * * * <pre> * Required. 
The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder getProductSetOrBuilder() { return productSet_ == null ? com.google.cloud.vision.v1p3beta1.ProductSet.getDefaultInstance() : productSet_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getProductSet()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getProductSet()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest)) { return super.equals(obj); } com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest other = (com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest) obj; if (hasProductSet() != other.hasProductSet()) return false; if (hasProductSet()) { if (!getProductSet().equals(other.getProductSet())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasProductSet()) { hash = (37 * hash) + PRODUCT_SET_FIELD_NUMBER; hash = (53 * hash) + getProductSet().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `UpdateProductSet` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p3beta1.UpdateProductSetRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p3beta1.UpdateProductSetRequest) com.google.cloud.vision.v1p3beta1.UpdateProductSetRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductSetRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductSetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest.class, com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest.Builder.class); } // Construct using com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getProductSetFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; productSet_ = null; if (productSetBuilder_ != null) { productSetBuilder_.dispose(); productSetBuilder_ = null; } 
updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductSetRequest_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest getDefaultInstanceForType() { return com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest build() { com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest buildPartial() { com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest result = new com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.productSet_ = productSetBuilder_ == null ? productSet_ : productSetBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest) { return mergeFrom((com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest other) { if (other == com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest.getDefaultInstance()) return this; if (other.hasProductSet()) { mergeProductSet(other.getProductSet()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == 
null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getProductSetFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.vision.v1p3beta1.ProductSet productSet_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p3beta1.ProductSet, com.google.cloud.vision.v1p3beta1.ProductSet.Builder, com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder> productSetBuilder_; /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the productSet field is set. */ public boolean hasProductSet() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The productSet. */ public com.google.cloud.vision.v1p3beta1.ProductSet getProductSet() { if (productSetBuilder_ == null) { return productSet_ == null ? com.google.cloud.vision.v1p3beta1.ProductSet.getDefaultInstance() : productSet_; } else { return productSetBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProductSet(com.google.cloud.vision.v1p3beta1.ProductSet value) { if (productSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } productSet_ = value; } else { productSetBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProductSet( com.google.cloud.vision.v1p3beta1.ProductSet.Builder builderForValue) { if (productSetBuilder_ == null) { productSet_ = builderForValue.build(); } else { productSetBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeProductSet(com.google.cloud.vision.v1p3beta1.ProductSet value) { if (productSetBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && productSet_ != null && productSet_ != com.google.cloud.vision.v1p3beta1.ProductSet.getDefaultInstance()) { getProductSetBuilder().mergeFrom(value); } else { productSet_ = value; } } else { productSetBuilder_.mergeFrom(value); } if (productSet_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. 
* </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearProductSet() { bitField0_ = (bitField0_ & ~0x00000001); productSet_ = null; if (productSetBuilder_ != null) { productSetBuilder_.dispose(); productSetBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p3beta1.ProductSet.Builder getProductSetBuilder() { bitField0_ |= 0x00000001; onChanged(); return getProductSetFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder getProductSetOrBuilder() { if (productSetBuilder_ != null) { return productSetBuilder_.getMessageOrBuilder(); } else { return productSet_ == null ? com.google.cloud.vision.v1p3beta1.ProductSet.getDefaultInstance() : productSet_; } } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. 
* </pre> * * <code> * .google.cloud.vision.v1p3beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p3beta1.ProductSet, com.google.cloud.vision.v1p3beta1.ProductSet.Builder, com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder> getProductSetFieldBuilder() { if (productSetBuilder_ == null) { productSetBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p3beta1.ProductSet, com.google.cloud.vision.v1p3beta1.ProductSet.Builder, com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder>( getProductSet(), getParentForChildren(), isClean()); productSet_ = null; } return productSetBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p3beta1.UpdateProductSetRequest) } // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p3beta1.UpdateProductSetRequest) private static final com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest(); } public static com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateProductSetRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateProductSetRequest>() { @java.lang.Override public UpdateProductSetRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateProductSetRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateProductSetRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductSetRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,169
java-vision/proto-google-cloud-vision-v1p4beta1/src/main/java/com/google/cloud/vision/v1p4beta1/UpdateProductSetRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p4beta1/product_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.vision.v1p4beta1; /** * * * <pre> * Request message for the `UpdateProductSet` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.UpdateProductSetRequest} */ public final class UpdateProductSetRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.UpdateProductSetRequest) UpdateProductSetRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateProductSetRequest.newBuilder() to construct. 
private UpdateProductSetRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateProductSetRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateProductSetRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductSetRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductSetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest.class, com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest.Builder.class); } private int bitField0_; public static final int PRODUCT_SET_FIELD_NUMBER = 1; private com.google.cloud.vision.v1p4beta1.ProductSet productSet_; /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the productSet field is set. */ @java.lang.Override public boolean hasProductSet() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The productSet. */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.ProductSet getProductSet() { return productSet_ == null ? com.google.cloud.vision.v1p4beta1.ProductSet.getDefaultInstance() : productSet_; } /** * * * <pre> * Required. 
The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder getProductSetOrBuilder() { return productSet_ == null ? com.google.cloud.vision.v1p4beta1.ProductSet.getDefaultInstance() : productSet_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to * update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask path is `display_name`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getProductSet()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getProductSet()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest)) { return super.equals(obj); } com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest other = (com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest) obj; if (hasProductSet() != other.hasProductSet()) return false; if (hasProductSet()) { if (!getProductSet().equals(other.getProductSet())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasProductSet()) { hash = (37 * hash) + PRODUCT_SET_FIELD_NUMBER; hash = (53 * hash) + getProductSet().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `UpdateProductSet` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.UpdateProductSetRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.UpdateProductSetRequest) com.google.cloud.vision.v1p4beta1.UpdateProductSetRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductSetRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductSetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest.class, com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest.Builder.class); } // Construct using com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getProductSetFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; productSet_ = null; if (productSetBuilder_ != null) { productSetBuilder_.dispose(); productSetBuilder_ = null; } 
updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductSetRequest_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest getDefaultInstanceForType() { return com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest build() { com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest buildPartial() { com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest result = new com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.productSet_ = productSetBuilder_ == null ? productSet_ : productSetBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest) { return mergeFrom((com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest other) { if (other == com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest.getDefaultInstance()) return this; if (other.hasProductSet()) { mergeProductSet(other.getProductSet()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == 
null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getProductSetFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.vision.v1p4beta1.ProductSet productSet_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.ProductSet, com.google.cloud.vision.v1p4beta1.ProductSet.Builder, com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder> productSetBuilder_; /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the productSet field is set. */ public boolean hasProductSet() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The productSet. */ public com.google.cloud.vision.v1p4beta1.ProductSet getProductSet() { if (productSetBuilder_ == null) { return productSet_ == null ? com.google.cloud.vision.v1p4beta1.ProductSet.getDefaultInstance() : productSet_; } else { return productSetBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProductSet(com.google.cloud.vision.v1p4beta1.ProductSet value) { if (productSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } productSet_ = value; } else { productSetBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProductSet( com.google.cloud.vision.v1p4beta1.ProductSet.Builder builderForValue) { if (productSetBuilder_ == null) { productSet_ = builderForValue.build(); } else { productSetBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeProductSet(com.google.cloud.vision.v1p4beta1.ProductSet value) { if (productSetBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && productSet_ != null && productSet_ != com.google.cloud.vision.v1p4beta1.ProductSet.getDefaultInstance()) { getProductSetBuilder().mergeFrom(value); } else { productSet_ = value; } } else { productSetBuilder_.mergeFrom(value); } if (productSet_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. 
* </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearProductSet() { bitField0_ = (bitField0_ & ~0x00000001); productSet_ = null; if (productSetBuilder_ != null) { productSetBuilder_.dispose(); productSetBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.ProductSet.Builder getProductSetBuilder() { bitField0_ |= 0x00000001; onChanged(); return getProductSetFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder getProductSetOrBuilder() { if (productSetBuilder_ != null) { return productSetBuilder_.getMessageOrBuilder(); } else { return productSet_ == null ? com.google.cloud.vision.v1p4beta1.ProductSet.getDefaultInstance() : productSet_; } } /** * * * <pre> * Required. The ProductSet resource which replaces the one on the server. 
* </pre>
     *
     * <code>
     * .google.cloud.vision.v1p4beta1.ProductSet product_set = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.vision.v1p4beta1.ProductSet,
            com.google.cloud.vision.v1p4beta1.ProductSet.Builder,
            com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder>
        getProductSetFieldBuilder() {
      // Lazily creates the single-field builder; ownership of the current value
      // moves into the builder, so the cached field is nulled out afterwards.
      if (productSetBuilder_ == null) {
        productSetBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.vision.v1p4beta1.ProductSet,
                com.google.cloud.vision.v1p4beta1.ProductSet.Builder,
                com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder>(
                getProductSet(), getParentForChildren(), isClean());
        productSet_ = null;
      }
      return productSetBuilder_;
    }

    // Cached update_mask value; null once updateMaskBuilder_ takes ownership.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0); // has-bit for update_mask
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null ?
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      } else {
        // A field builder exists; it holds the authoritative message.
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002; // has-bit for update_mask
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002; // has-bit for update_mask
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
* </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge only when a non-default value is already set (reference comparison against
        // the default-instance singleton is intentional); otherwise plain replace.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002); // clear has-bit for update_mask
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Marks the field as set before handing out a mutable builder for it.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null ?
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      }
    }

    /**
     *
     *
     * <pre>
     * The [FieldMask][google.protobuf.FieldMask] that specifies which fields to
     * update.
     * If update_mask isn't specified, all mutable fields are to be updated.
     * Valid mask path is `display_name`.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the single-field builder; ownership of the current value
      // moves into the builder, so the cached field is nulled out afterwards.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.UpdateProductSetRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.UpdateProductSetRequest)
  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest();
  }

  public static com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<UpdateProductSetRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateProductSetRequest>() {
        @java.lang.Override
        public UpdateProductSetRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // Generated parser: delegates to Builder.mergeFrom and attaches the
          // partially-built message to any InvalidProtocolBufferException thrown.
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in a protocol-buffer exception, preserving the cause.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateProductSetRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateProductSetRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.vision.v1p4beta1.UpdateProductSetRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}