repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/sdk-platform-java
35,099
java-common-protos/proto-google-common-protos/src/main/java/com/google/api/DocumentationRule.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/documentation.proto // Protobuf Java Version: 3.25.8 package com.google.api; /** * * * <pre> * A documentation rule provides information about individual API elements. * </pre> * * Protobuf type {@code google.api.DocumentationRule} */ public final class DocumentationRule extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.api.DocumentationRule) DocumentationRuleOrBuilder { private static final long serialVersionUID = 0L; // Use DocumentationRule.newBuilder() to construct. 
private DocumentationRule(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DocumentationRule() { selector_ = ""; description_ = ""; deprecationDescription_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DocumentationRule(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.DocumentationProto .internal_static_google_api_DocumentationRule_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.DocumentationProto .internal_static_google_api_DocumentationRule_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.DocumentationRule.class, com.google.api.DocumentationRule.Builder.class); } public static final int SELECTOR_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object selector_ = ""; /** * * * <pre> * The selector is a comma-separated list of patterns for any element such as * a method, a field, an enum value. Each pattern is a qualified name of the * element which may end in "*", indicating a wildcard. Wildcards are only * allowed at the end and for a whole component of the qualified name, * i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match * one or more components. To specify a default for all applicable elements, * the whole pattern "*" is used. * </pre> * * <code>string selector = 1;</code> * * @return The selector. 
*/ @java.lang.Override public java.lang.String getSelector() { java.lang.Object ref = selector_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); selector_ = s; return s; } } /** * * * <pre> * The selector is a comma-separated list of patterns for any element such as * a method, a field, an enum value. Each pattern is a qualified name of the * element which may end in "*", indicating a wildcard. Wildcards are only * allowed at the end and for a whole component of the qualified name, * i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match * one or more components. To specify a default for all applicable elements, * the whole pattern "*" is used. * </pre> * * <code>string selector = 1;</code> * * @return The bytes for selector. */ @java.lang.Override public com.google.protobuf.ByteString getSelectorBytes() { java.lang.Object ref = selector_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); selector_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DESCRIPTION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object description_ = ""; /** * * * <pre> * Description of the selected proto element (e.g. a message, a method, a * 'service' definition, or a field). Defaults to leading &amp; trailing comments * taken from the proto source definition of the proto element. * </pre> * * <code>string description = 2;</code> * * @return The description. 
*/ @java.lang.Override public java.lang.String getDescription() { java.lang.Object ref = description_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); description_ = s; return s; } } /** * * * <pre> * Description of the selected proto element (e.g. a message, a method, a * 'service' definition, or a field). Defaults to leading &amp; trailing comments * taken from the proto source definition of the proto element. * </pre> * * <code>string description = 2;</code> * * @return The bytes for description. */ @java.lang.Override public com.google.protobuf.ByteString getDescriptionBytes() { java.lang.Object ref = description_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); description_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DEPRECATION_DESCRIPTION_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object deprecationDescription_ = ""; /** * * * <pre> * Deprecation description of the selected element(s). It can be provided if * an element is marked as `deprecated`. * </pre> * * <code>string deprecation_description = 3;</code> * * @return The deprecationDescription. */ @java.lang.Override public java.lang.String getDeprecationDescription() { java.lang.Object ref = deprecationDescription_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); deprecationDescription_ = s; return s; } } /** * * * <pre> * Deprecation description of the selected element(s). It can be provided if * an element is marked as `deprecated`. * </pre> * * <code>string deprecation_description = 3;</code> * * @return The bytes for deprecationDescription. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDeprecationDescriptionBytes() { java.lang.Object ref = deprecationDescription_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); deprecationDescription_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(selector_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, selector_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, description_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deprecationDescription_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, deprecationDescription_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(selector_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, selector_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, description_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deprecationDescription_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, deprecationDescription_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.api.DocumentationRule)) { return super.equals(obj); } com.google.api.DocumentationRule other = (com.google.api.DocumentationRule) obj; if (!getSelector().equals(other.getSelector())) return false; if (!getDescription().equals(other.getDescription())) return false; if (!getDeprecationDescription().equals(other.getDeprecationDescription())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SELECTOR_FIELD_NUMBER; hash = (53 * hash) + getSelector().hashCode(); hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; hash = (53 * hash) + getDescription().hashCode(); hash = (37 * hash) + DEPRECATION_DESCRIPTION_FIELD_NUMBER; hash = (53 * hash) + getDeprecationDescription().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.api.DocumentationRule parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.DocumentationRule parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.DocumentationRule parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.DocumentationRule parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.api.DocumentationRule parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.DocumentationRule parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.DocumentationRule parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.DocumentationRule parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.DocumentationRule parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.api.DocumentationRule parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.DocumentationRule parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.DocumentationRule parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.api.DocumentationRule prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A documentation rule provides information about individual API elements. * </pre> * * Protobuf type {@code google.api.DocumentationRule} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.api.DocumentationRule) com.google.api.DocumentationRuleOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.DocumentationProto .internal_static_google_api_DocumentationRule_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.DocumentationProto .internal_static_google_api_DocumentationRule_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.DocumentationRule.class, com.google.api.DocumentationRule.Builder.class); } // Construct using com.google.api.DocumentationRule.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; selector_ = ""; description_ = ""; deprecationDescription_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.api.DocumentationProto .internal_static_google_api_DocumentationRule_descriptor; } 
@java.lang.Override public com.google.api.DocumentationRule getDefaultInstanceForType() { return com.google.api.DocumentationRule.getDefaultInstance(); } @java.lang.Override public com.google.api.DocumentationRule build() { com.google.api.DocumentationRule result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.api.DocumentationRule buildPartial() { com.google.api.DocumentationRule result = new com.google.api.DocumentationRule(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.api.DocumentationRule result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.selector_ = selector_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.description_ = description_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.deprecationDescription_ = deprecationDescription_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.api.DocumentationRule) { return mergeFrom((com.google.api.DocumentationRule) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.api.DocumentationRule other) { if (other == com.google.api.DocumentationRule.getDefaultInstance()) return this; if (!other.getSelector().isEmpty()) { selector_ = other.selector_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getDescription().isEmpty()) { description_ = other.description_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getDeprecationDescription().isEmpty()) { deprecationDescription_ = other.deprecationDescription_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { selector_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { description_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { deprecationDescription_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object selector_ = ""; /** * * * <pre> * The selector is a comma-separated list of patterns for any element such as * a method, 
a field, an enum value. Each pattern is a qualified name of the * element which may end in "*", indicating a wildcard. Wildcards are only * allowed at the end and for a whole component of the qualified name, * i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match * one or more components. To specify a default for all applicable elements, * the whole pattern "*" is used. * </pre> * * <code>string selector = 1;</code> * * @return The selector. */ public java.lang.String getSelector() { java.lang.Object ref = selector_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); selector_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The selector is a comma-separated list of patterns for any element such as * a method, a field, an enum value. Each pattern is a qualified name of the * element which may end in "*", indicating a wildcard. Wildcards are only * allowed at the end and for a whole component of the qualified name, * i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match * one or more components. To specify a default for all applicable elements, * the whole pattern "*" is used. * </pre> * * <code>string selector = 1;</code> * * @return The bytes for selector. */ public com.google.protobuf.ByteString getSelectorBytes() { java.lang.Object ref = selector_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); selector_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The selector is a comma-separated list of patterns for any element such as * a method, a field, an enum value. Each pattern is a qualified name of the * element which may end in "*", indicating a wildcard. Wildcards are only * allowed at the end and for a whole component of the qualified name, * i.e. 
"foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match * one or more components. To specify a default for all applicable elements, * the whole pattern "*" is used. * </pre> * * <code>string selector = 1;</code> * * @param value The selector to set. * @return This builder for chaining. */ public Builder setSelector(java.lang.String value) { if (value == null) { throw new NullPointerException(); } selector_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The selector is a comma-separated list of patterns for any element such as * a method, a field, an enum value. Each pattern is a qualified name of the * element which may end in "*", indicating a wildcard. Wildcards are only * allowed at the end and for a whole component of the qualified name, * i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match * one or more components. To specify a default for all applicable elements, * the whole pattern "*" is used. * </pre> * * <code>string selector = 1;</code> * * @return This builder for chaining. */ public Builder clearSelector() { selector_ = getDefaultInstance().getSelector(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The selector is a comma-separated list of patterns for any element such as * a method, a field, an enum value. Each pattern is a qualified name of the * element which may end in "*", indicating a wildcard. Wildcards are only * allowed at the end and for a whole component of the qualified name, * i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match * one or more components. To specify a default for all applicable elements, * the whole pattern "*" is used. * </pre> * * <code>string selector = 1;</code> * * @param value The bytes for selector to set. * @return This builder for chaining. 
*/ public Builder setSelectorBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); selector_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object description_ = ""; /** * * * <pre> * Description of the selected proto element (e.g. a message, a method, a * 'service' definition, or a field). Defaults to leading &amp; trailing comments * taken from the proto source definition of the proto element. * </pre> * * <code>string description = 2;</code> * * @return The description. */ public java.lang.String getDescription() { java.lang.Object ref = description_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); description_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Description of the selected proto element (e.g. a message, a method, a * 'service' definition, or a field). Defaults to leading &amp; trailing comments * taken from the proto source definition of the proto element. * </pre> * * <code>string description = 2;</code> * * @return The bytes for description. */ public com.google.protobuf.ByteString getDescriptionBytes() { java.lang.Object ref = description_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); description_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Description of the selected proto element (e.g. a message, a method, a * 'service' definition, or a field). Defaults to leading &amp; trailing comments * taken from the proto source definition of the proto element. * </pre> * * <code>string description = 2;</code> * * @param value The description to set. * @return This builder for chaining. 
*/ public Builder setDescription(java.lang.String value) { if (value == null) { throw new NullPointerException(); } description_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Description of the selected proto element (e.g. a message, a method, a * 'service' definition, or a field). Defaults to leading &amp; trailing comments * taken from the proto source definition of the proto element. * </pre> * * <code>string description = 2;</code> * * @return This builder for chaining. */ public Builder clearDescription() { description_ = getDefaultInstance().getDescription(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Description of the selected proto element (e.g. a message, a method, a * 'service' definition, or a field). Defaults to leading &amp; trailing comments * taken from the proto source definition of the proto element. * </pre> * * <code>string description = 2;</code> * * @param value The bytes for description to set. * @return This builder for chaining. */ public Builder setDescriptionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); description_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object deprecationDescription_ = ""; /** * * * <pre> * Deprecation description of the selected element(s). It can be provided if * an element is marked as `deprecated`. * </pre> * * <code>string deprecation_description = 3;</code> * * @return The deprecationDescription. */ public java.lang.String getDeprecationDescription() { java.lang.Object ref = deprecationDescription_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); deprecationDescription_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Deprecation description of the selected element(s). 
It can be provided if * an element is marked as `deprecated`. * </pre> * * <code>string deprecation_description = 3;</code> * * @return The bytes for deprecationDescription. */ public com.google.protobuf.ByteString getDeprecationDescriptionBytes() { java.lang.Object ref = deprecationDescription_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); deprecationDescription_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Deprecation description of the selected element(s). It can be provided if * an element is marked as `deprecated`. * </pre> * * <code>string deprecation_description = 3;</code> * * @param value The deprecationDescription to set. * @return This builder for chaining. */ public Builder setDeprecationDescription(java.lang.String value) { if (value == null) { throw new NullPointerException(); } deprecationDescription_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Deprecation description of the selected element(s). It can be provided if * an element is marked as `deprecated`. * </pre> * * <code>string deprecation_description = 3;</code> * * @return This builder for chaining. */ public Builder clearDeprecationDescription() { deprecationDescription_ = getDefaultInstance().getDeprecationDescription(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Deprecation description of the selected element(s). It can be provided if * an element is marked as `deprecated`. * </pre> * * <code>string deprecation_description = 3;</code> * * @param value The bytes for deprecationDescription to set. * @return This builder for chaining. 
*/ public Builder setDeprecationDescriptionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); deprecationDescription_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.api.DocumentationRule) } // @@protoc_insertion_point(class_scope:google.api.DocumentationRule) private static final com.google.api.DocumentationRule DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.api.DocumentationRule(); } public static com.google.api.DocumentationRule getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DocumentationRule> PARSER = new com.google.protobuf.AbstractParser<DocumentationRule>() { @java.lang.Override public DocumentationRule parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DocumentationRule> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<DocumentationRule> getParserForType() { return PARSER; } @java.lang.Override public com.google.api.DocumentationRule getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/coherence
35,130
prj/coherence-core/src/main/java/com/tangosol/internal/metrics/MetricsHttpHandler.java
/* * Copyright (c) 2000, 2025, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ package com.tangosol.internal.metrics; import com.oracle.coherence.common.base.Exceptions; import com.oracle.coherence.common.base.Logger; import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.tangosol.coherence.config.Config; import com.tangosol.net.metrics.MBeanMetric; import com.tangosol.util.SimpleMapEntry; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.URI; import java.net.URLDecoder; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.GZIPOutputStream; import static java.util.stream.Collectors.toList; /** * Metrics Rest http endpoint * * @author jk 2019.06.24 * @since 12.2.1.4.0 */ public class MetricsHttpHandler implements HttpHandler { // ----- constructors --------------------------------------------------- /** * Create a MetricsResource. * <p> * This constructor will be used by Coherence to create the resource instance. * <p> * The {@code coherence.metrics.legacy.names} system property will be used to * determine whether legacy metric names ot Microprofile compatible metric * names will be used when publishing Prometheus formatted metrics. */ public MetricsHttpHandler() { this(defaultFormat()); } /** * Create a MetricsResource. * * @param format the format to use for metric names and tag keys. 
*/ protected MetricsHttpHandler(Format format) { f_format = format; } // ----- accessors ------------------------------------------------------ /** * Returns the {@link Format} being used for metric names. * * @return the {@link Format} being used for metric names */ public Format getFormat() { return f_format; } /** * Returns the String being used for the context root. * * @return the String being used for the context root * * @since 14.1.2.0.0 */ public String getPath() { return m_sPath; } /** * Setter for path of the context root. * * @param sPath the root path * * @since 14.1.2.0.0 */ public void setPath(String sPath) { m_sPath = sPath; } // ----- HttpHandler methods -------------------------------------------- @Override public void handle(HttpExchange exchange) throws IOException { try { URI requestURI = exchange.getRequestURI(); Map<String, List<String>> mapQuery = getQueryParameters(requestURI); Headers headers = exchange.getRequestHeaders(); String sSuffix = null; String sName = null; List<String> listExtended = mapQuery.remove("extended"); boolean fExtended = f_fAlwaysUseExtended || listExtended != null && !listExtended.isEmpty() && Boolean.parseBoolean(listExtended.get(0)); // The path will always start with the context root path, e.g. /metrics, but may have *anything* after that // as the JDK http server is not fussy String sPath = requestURI.getPath(); if (sPath.equals(getPath()) || sPath.startsWith(getPath() + "/")) { // path is valid so far, as it is either the root path or root + "/"... 
// strip any .suffix which can be used to override the accepted media type if (sPath.endsWith(".txt")) { sSuffix = ".txt"; sPath = sPath.substring(0, sPath.length() - 4); } else if (sPath.endsWith(".json")) { sSuffix = ".json"; sPath = sPath.substring(0, sPath.length() - 5); } // will be 2 or longer, first element is empty string // valid length is 2, 3, or 4 if 4 is empty String[] asSegment = sPath.split("/"); if (asSegment.length > 4 || (asSegment.length == 4 && asSegment[3].length() != 0)) { // the path is invalid, so send 404 send(exchange, 404); return; } if (asSegment.length >= 3) { // we have a metric name in the path i.e. /metrics/foo sName = asSegment[2]; } } else { // the path is invalid, so send 404 send(exchange, 404); return; } Predicate<MBeanMetric> predicate = createPredicate(sName, mapQuery); MetricsFormatter formatter; if (".txt".equals(sSuffix)) { formatter = getPrometheusMetrics(predicate, fExtended); } else if (".json".equals(sSuffix)) { formatter = getJsonMetrics(predicate, fExtended); } else { formatter = getFormatterForAcceptedType(headers, predicate, fExtended); } if (formatter == null) { // no valid media types in the "Accept" header or path suffix send(exchange, 415); return; } boolean fGzip = false; String sEncoding = headers.getFirst("Accept-Encoding"); if (sEncoding != null) { fGzip = Arrays.stream(sEncoding.split(",")) .map(String::trim) .anyMatch("gzip"::equalsIgnoreCase); } try (OutputStream os = exchange.getResponseBody()) { exchange.getResponseHeaders().set("Content-Type", formatter.getContentType()); if (fGzip) { sendGZippedMetrics(exchange, os, formatter); } else { sendMetrics(exchange, () -> os, formatter); } } } catch (Throwable t) { Logger.err(t); exchange.sendResponseHeaders(500, -1); } } // ----- helper methods ------------------------------------------------- /** * Returns the query parameters present in the URI. 
* * @param uri the {@link URI} to get the query parameters from * * @return the map of query parameters from the URI, or an empty map if there * were no query parameters */ private Map<String, List<String>> getQueryParameters(URI uri) { String sQuery = uri.getQuery(); if (sQuery == null || sQuery.length() == 0) { return Collections.emptyMap(); } return Arrays.stream(sQuery.split("&")) .map(this::splitQueryParameter) .filter(e -> e.getValue() != null) .collect(Collectors.groupingBy(Map.Entry::getKey, LinkedHashMap::new, Collectors.mapping(Map.Entry::getValue, Collectors.toList()))); } /** * Split the specified key/value query parameter into a {@link Map.Entry} * decoding any encoded characters in the query parameter value. * * @param sParam the query parameter to decode * * @return the query parameter decoded into a {@link Map.Entry} */ private Map.Entry<String, String> splitQueryParameter(String sParam) { try { int nIndex = sParam.indexOf("="); String sKey = nIndex > 0 ? sParam.substring(0, nIndex) : sParam; String sValue = nIndex > 0 && sParam.length() > nIndex + 1 ? sParam.substring(nIndex + 1) : null; String sDecoded = sValue == null ? null : URLDecoder.decode(sValue, "UTF-8"); return new SimpleMapEntry<>(URLDecoder.decode(sKey, "UTF-8"), sDecoded); } catch (UnsupportedEncodingException e) { throw Exceptions.ensureRuntimeException(e); } } /** * Returns the {@link MetricsFormatter} matching the media type in any Accept header * present in the request. 
* * @param headers the http request headers * @param predicate the optional {@link Predicate} to pass to the {@link MetricsFormatter} * @param fExtended the extended flag to the {@link MetricsFormatter} * * @return the {@link MetricsFormatter} matching the media type in any Accept header * present in the request or {@code null} if there is no Accept header or * no {@link MetricsFormatter} matches the header */ private MetricsFormatter getFormatterForAcceptedType(Headers headers, Predicate<MBeanMetric> predicate, boolean fExtended) { List<String> listAccept = headers.get("Accept"); if (listAccept == null) { return getPrometheusMetrics(predicate, fExtended); } else { for (String sType : listAccept) { String[] asType = sType.split(","); for (String sAccept : asType) { int nIndex = sAccept.indexOf(';'); if (nIndex >= 0) { sAccept = sAccept.substring(0, nIndex); } switch (sAccept.trim()) { case APPLICATION_JSON: return getJsonMetrics(predicate, fExtended); case TEXT_PLAIN: case WILDCARD: return getPrometheusMetrics(predicate, fExtended); } } } } return null; } /** * Send the metrics response. * This method uses a {@link StreamSupplier} to supply the {@link OutputStream} to send the metrics data to. * This is because we must send the response headers before sending any output data but if using an output * stream such as {@link GZIPOutputStream} this sends dat aas soon as it is constructed. By using a supplier * we can delay construction of any stream and hence sending any data until after this method has sent the * response headers. 
* * @param exchange the {@link HttpExchange} to send the response to * @param supplier a {@link StreamSupplier} to supply the {@link OutputStream} to send the metrics data to * @param formatter the {@link MetricsFormatter} to format the response * * @throws IOException if an error occurs sending the response */ private void sendMetrics(HttpExchange exchange, StreamSupplier supplier, MetricsFormatter formatter) throws IOException { exchange.sendResponseHeaders(200, 0); try (Writer writer = new OutputStreamWriter(supplier.get())) { formatter.writeMetrics(writer); writer.flush(); } } /** * Send the metrics response using gzip to compress the metrics data. * <p> * This method will wrap the specified {@link OutputStream} in a {@link GZIPOutputStream} * before sending data. * * @param exchange the {@link HttpExchange} to send the response to * @param os the {@link OutputStream} to send the metrics data to * @param formatter the {@link MetricsFormatter} to format the response * * @throws IOException if an error occurs sending the response */ private void sendGZippedMetrics(HttpExchange exchange, OutputStream os, MetricsFormatter formatter) throws IOException { exchange.getResponseHeaders().set("Content-Encoding", "gzip"); sendMetrics(exchange, () -> new GZIPOutputStream(os), formatter); } /** * Send a simple http response. * * @param t the {@link HttpExchange} to send the response to * @param status the response status */ private static void send(HttpExchange t, int status) { try { t.sendResponseHeaders(status, 0); try (OutputStream os = t.getResponseBody()) { os.write(EMPTY_BODY); } } catch (IOException e) { e.printStackTrace(); } } /** * Obtain the current Prometheus metrics data. * <p> * If the {@code sName} argument is {@code null} or an empty string, all metrics will be returned. * <p> * If the {@code sName} argument is not {@code null} and not empty, all metrics matching the specified * name will be returned. 
Metrics can be further filtered by specifying query parameters. * Each name/value pair in the query parameters is used to match the metric tag values of the metrics returned. * Not all of a metrics tags need to be specified, matching is only done on the tags specified in the query * parameters, the metric will match even if it has extra tags not specified in the query parameters. * * @return the current Prometheus metrics data */ protected MetricsFormatter getPrometheusMetrics(Predicate<MBeanMetric> predicate, boolean fExtended) { return new PrometheusFormatter(fExtended, f_format, getMetrics(predicate)); } /** * Obtain the current JSON formatted metrics data for all metrics. * <p> * If the {@code sName} argument is {@code null} or an empty string, all metrics will be returned. * <p> * If the {@code sName} argument is not {@code null} and not empty, all metrics matching the specified * name will be returned. Metrics can be further filtered by specifying query parameters. * Each name/value pair in the query parameters is used to match the metric tag values of the metrics returned. * Not all of a metrics tags need to be specified, matching is only done on the tags specified in the query * parameters, the metric will match even if it has extra tags not specified in the query parameters. * * @return the current JSON formatted metrics data for all metrics */ protected MetricsFormatter getJsonMetrics(Predicate<MBeanMetric> predicate, boolean fExtended) { return new JsonFormatter(fExtended, getMetrics(predicate)); } /** * Determine the metric format to use based on system properties. 
* * @return the metric format to use based on system properties */ static Format defaultFormat() { if (Config.getBoolean(PROP_USE_MP_NAMES, false)) { return Format.Microprofile; } else if (Config.getBoolean(PROP_USE_DOT_NAMES, false)) { return Format.DotDelimited; } // As of 14.1.2 and 24.09 this property defaults to false, which will remove the // "vendor:" prefix from Prometheus metrics and require use of updated Grafana dashboards else if (Config.getBoolean(PROP_USE_LEGACY_NAMES, false)) { return Format.Legacy; } return Format.Default; } /** * Returns the lst of metrics matching the predicate, or all metrics if * the predicate is {@code null}. * * @param predicate the optional predicate to use to filter the returned metrics * * @return the lst of metrics matching the predicate, or all metrics if * the predicate is {@code null} */ protected List<MBeanMetric> getMetrics(Predicate<MBeanMetric> predicate) { try { Stream<Map.Entry<MBeanMetric.Identifier, MBeanMetric>> stream = DefaultMetricRegistry.getRegistry().stream(); if (predicate != null) { stream = stream.filter(e -> predicate.test(e.getValue())); } return stream.map(Map.Entry::getValue).collect(toList()); } catch (Throwable t) { Logger.err("Exception in MetricsResource.getMetrics():", t); throw t; } } /** * Create a {@link MetricPredicate} from a metric name pattern and tags. * * @param sName the optional metric name pattern to use in the predicate * @param mapTags the optional tags to use in the predicate * * @return the {@link MetricPredicate} to use or {@code null} if neither the name * nor the tags are specified */ private MetricPredicate createPredicate(String sName, Map<String, List<String>> mapTags) { if ((sName == null || sName.isEmpty()) && mapTags.isEmpty()) { return null; } return new MetricPredicate(sName, mapTags); } // ----- inner class: MetricPredicate ----------------------------------- /** * A {@link Predicate} that can be used to restrict the metrics returned by a request. 
*/ protected static class MetricPredicate implements Predicate<MBeanMetric> { /** * Create a predicate. * * @param sName the value to use to match a metric name * @param mapTags the values to use to match a metric tags */ public MetricPredicate(String sName, Map<String, List<String>> mapTags) { f_sName = sName; f_mapTags = mapTags.entrySet() .stream() .filter(e -> !e.getKey().equalsIgnoreCase("extended")) .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get(0))); } // ----- Predicate methods ------------------------------------------ @Override public boolean test(MBeanMetric metric) { return hasValue(metric) && nameMatches(metric) && tagsMatch(metric); } // ----- helper methods --------------------------------------------- /** * Returns {@code true} if the metric has a non-null value. * * @param metric the metric to check * * @return {@code true} if the metric has a non-null value */ private boolean hasValue(MBeanMetric metric) { return metric.getValue() != null; } /** * Returns {@code true} if the metric name matches this predicate. * * @param metric the metric to check * * @return {@code true} if the metric name matches this predicate */ private boolean nameMatches(MBeanMetric metric) { return f_sName == null || metric.getName().startsWith(f_sName); } /** * Returns {@code true} if the metric tags matches this predicate. 
* * @param metric the metric to check * * @return {@code true} if the metric tags matches this predicate */ private boolean tagsMatch(MBeanMetric metric) { if (f_mapTags == null || f_mapTags.isEmpty()) { return true; } Map<String, String> mapTags = metric.getTags(); for (String sKey : f_mapTags.keySet()) { if (!f_mapTags.get(sKey).equals(mapTags.get(sKey))) { return false; } } return true; } // ----- data members ----------------------------------------------- private final String f_sName; private final Map<String, String> f_mapTags; } // ----- inner class: PrometheusFormatter ------------------------------- /** * A {@link MetricsFormatter} implementation that writes metrics * in a Prometheus format. */ protected static class PrometheusFormatter implements MetricsFormatter { // ---- constructors ------------------------------------------------ /** * Construct {@code PrometheusFormatter} instance. * * @param fExtended the flag specifying whether to include metric type * and description into the output * @param format the format to use for metric names and tag keys. * @param listMetrics the list of metrics to write */ public PrometheusFormatter(boolean fExtended, Format format, List<MBeanMetric> listMetrics) { f_fExtended = fExtended; f_format = format; f_listMetrics = listMetrics; } // ---- MetricsFormatter interface ---------------------------------- @Override public String getContentType() { return TEXT_PLAIN; } @Override public void writeMetrics(Writer writer) throws IOException { for (MBeanMetric metric : f_listMetrics) { writeMetric(writer, metric); writer.flush(); } } // ----- helper methods --------------------------------------------- /** * Write the metric. 
* * @param writer the {@link Writer} to write to * @param metric the metric to write * * @throws IOException if an error occurs writing the metric */ private void writeMetric(Writer writer, MBeanMetric metric) throws IOException { Object oValue = metric.getValue(); if (oValue != null) { MBeanMetric.Identifier id = metric.getIdentifier(); Map<String, String> mapTag = id.getPrometheusTags(); String sName; switch (f_format) { case Legacy: sName = id.getLegacyName(); break; case Microprofile: sName = id.getMicroprofileName(); break; case DotDelimited: sName = id.getFormattedName(); break; default: sName = id.getFormattedName().replaceAll("\\.", "_"); } if (f_fExtended) { writeType(writer, sName); writeHelp(writer, sName, metric.getDescription()); } writer.append(sName); writeTags(writer, mapTag); writer.append(' ') .append(oValue.toString()) .append('\n'); } } /** * Write the metric type line. * * @param writer the {@link Writer} to write to * @param sName the metric name * * @throws IOException if an error occurs writing the type line */ private void writeType(Writer writer, String sName) throws IOException { writer.append("# TYPE ").append(sName.trim()).append(" gauge\n"); } /** * Write the metric help description. * * @param writer the {@link Writer} to write to * @param sName the metric name * @param sDescription the metric help description * * @throws IOException if an error occurs writing the help description */ private void writeHelp(Writer writer, String sName, String sDescription) throws IOException { if (sDescription != null && sDescription.length() > 0) { writer.append("# HELP ") .append(sName) .append(' ') .append(sDescription) .append('\n'); } } /** * Write the metric tags. 
* * @param writer the {@link Writer} to write the tags to * @param mapTags the metric tags to write * * @throws IOException if an error occurs writing the tags */ private void writeTags(Writer writer, Map<String, String> mapTags) throws IOException { if (!mapTags.isEmpty()) { writer.write('{'); Iterator<Map.Entry<String, String>> iterator = mapTags.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<String, String> tag = iterator.next(); writer.append(tag.getKey()) .append("=\"") .append(tag.getValue()) .append('"'); if (iterator.hasNext()) { writer.append(","); } } writer.write('}'); } } // ---- data members ------------------------------------------------ /** * The flag specifying whether to include metric type and description * into the output. */ private final boolean f_fExtended; /** * The format to use for metric names and tag keys. */ private final Format f_format; /** * The list of metrics to write. */ private final List<MBeanMetric> f_listMetrics; } // ----- inner class: JsonFormatter ------------------------------------- /** * A {@link MetricsFormatter} implementation that writes metrics * in a JSON format. */ protected static class JsonFormatter implements MetricsFormatter { // ---- constructors ------------------------------------------------ /** * Construct {@code JsonFormatter} instance. 
* * @param fExtended the flag specifying whether to include metric type * and description into the output * @param metrics the list of metrics to write */ public JsonFormatter(boolean fExtended, List<MBeanMetric> metrics) { f_fExtended = fExtended; f_metrics = metrics; } // ---- MetricsFormatter interface ---------------------------------- @Override public String getContentType() { return APPLICATION_JSON; } @Override public void writeMetrics(Writer writer) throws IOException { writer.write('['); boolean separator = false; for (MBeanMetric metric : f_metrics) { separator = writeMetric(writer, metric, separator) || separator; } writer.write(']'); } // ----- helper methods --------------------------------------------- /** * Write the metric as a json object. * * @param writer the {@link Writer} to write the json to * @param metric the metric to write * @param fSeparator {@code true} to indicate that a comma separator should precede * the metric json object * * @throws IOException if an error occurs writing the tags */ private boolean writeMetric(Writer writer, MBeanMetric metric, boolean fSeparator) throws IOException { Object oValue = metric.getValue(); if (oValue != null) { MBeanMetric.Identifier id = metric.getIdentifier(); if (fSeparator) { writer.write(','); } writer.write('{'); writer.write("\"name\":\""); writer.write(id.getName()); writer.write("\","); writeTags(writer, id.getTags()); writer.write("\"scope\":\""); writer.write(id.getScope().name()); writer.write("\","); writer.write("\"value\":"); if (oValue instanceof Number || oValue instanceof Boolean) { writer.write(String.valueOf(oValue)); } else { writer.write('"'); writer.write(String.valueOf(oValue)); writer.write('"'); } String sDesc = metric.getDescription(); if (f_fExtended && sDesc != null && sDesc.length() > 0) { writer.write(','); writer.write("\"description\":\""); writer.write(sDesc); writer.write('"'); } writer.write('}'); return true; } return false; } /** * Write the metric tags as a json 
object. * * @param writer the {@link Writer} to write the json to * @param mapTags the metric tags to write * * @throws IOException if an error occurs writing the tags */ private void writeTags(Writer writer, Map<String, String> mapTags) throws IOException { if (!mapTags.isEmpty()) { String sTags = mapTags.entrySet().stream() .map(e -> '"' + e.getKey() + "\":\"" + e.getValue() + "\"") .collect(Collectors.joining(",")); writer.write("\"tags\":{"); writer.write(sTags); writer.write("},"); } } // ---- data members ------------------------------------------------ /** * The flag specifying whether to include metric type and description * into the output. */ private final boolean f_fExtended; /** * The list of metrics to write. */ private final List<MBeanMetric> f_metrics; } // ----- inner interface: StreamSupplier -------------------------------- /** * A supplier of {@link OutputStream} instances */ @FunctionalInterface private interface StreamSupplier { /** * Return the {@link OutputStream} to use to send a http response. * @return the {@link OutputStream} to use to send a http response * @throws IOException if there is an error creating the stream */ OutputStream get() throws IOException; } // ----- inner enum: Format --------------------------------------------- /** * An enum to represent the format to use for metric names and tag keys. */ public enum Format { /** * Names will the default format without a scope, e.g. coherence_cluster_size */ Default, /** * Names will be a dot delimited without a scope, e.g. coherence.cluster.size */ DotDelimited, /** * Names will be underscore delimited with a scope, e.g. vendor:coherence_cluster_size */ Legacy, /** * Names will be MP 2.0 compatible with a scope, e.g. vendor_Coherence_Cluster_Size */ Microprofile, } // ----- constants ------------------------------------------------------ /** * The System property to use to determine whether to always include * extended information (type and/or description) when publishing metrics. 
*/ protected static final String PROP_EXTENDED = "coherence.metrics.extended"; /** * A flag to determine whether to always include help information when * publishing metrics. */ protected static final boolean f_fAlwaysUseExtended = Boolean.parseBoolean(System.getProperty(PROP_EXTENDED, "false")); /** * A system property that when true outputs metric names using Coherence legacy * format. */ public static final String PROP_USE_LEGACY_NAMES = "coherence.metrics.legacy.names"; /** * A system property that when true outputs metric names as Microprofile 2.0 * compatible metric names. */ public static final String PROP_USE_MP_NAMES = "coherence.metrics.mp.names"; /** * A system property that when true outputs metric names as dot delimited metric names. */ public static final String PROP_USE_DOT_NAMES = "coherence.metrics.dot.names"; /** * The "Accept" header value for the json media type. */ public static final String APPLICATION_JSON = "application/json"; /** * The "Accept" header value for the text media type. */ public static final String TEXT_PLAIN = "text/plain"; /** * The "Accept" header value for the wild-card media type. */ public static final String WILDCARD = "*/*"; /** * An empty byte array to use as an empty response body. */ private static final byte[] EMPTY_BODY = new byte[0]; // ----- data members --------------------------------------------------- /** * The format to use for metric names and tag keys. */ protected Format f_format; /** * The context root path. * * @since 14.1.2.0.0 */ private String m_sPath; }
google/j2objc
35,397
jre_emul/android/platform/libcore/ojluni/src/main/java/java/time/Clock.java
/* * Copyright (c) 2012, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. * However, the following notice accompanied the original version of this * file: * * Copyright (c) 2007-2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. 
* * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package java.time; /* J2ObjC removed import jdk.internal.misc.VM; */ import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectStreamException; import static java.time.LocalTime.NANOS_PER_MINUTE; import static java.time.LocalTime.NANOS_PER_SECOND; import static java.time.LocalTime.NANOS_PER_MILLI; import java.io.Serializable; import java.util.Objects; import java.util.TimeZone; /** * A clock providing access to the current instant, date and time using a time-zone. * * <p>Instances of this abstract class are used to access a pluggable representation of the current * instant, which can be interpreted using the stored time-zone to find the current date and time. * For example, {@code Clock} can be used instead of {@link System#currentTimeMillis()} and {@link * TimeZone#getDefault()}. 
* * <p>Use of a {@code Clock} is optional. All key date-time classes also have a {@code now()} * factory method that uses the system clock in the default time zone. The primary purpose of this * abstraction is to allow alternate clocks to be plugged in as and when required. Applications use * an object to obtain the current time rather than a static method. This can simplify testing. * * <p>As such, this abstract class does not guarantee the result actually represents the current * instant on the time-line. Instead, it allows the application to provide a controlled view as to * what the current instant and time-zone are. * * <p>Best practice for applications is to pass a {@code Clock} into any method that requires the * current instant and time-zone. A dependency injection framework is one way to achieve this: * * <pre> * public class MyBean { * private Clock clock; // dependency inject * ... * public void process(LocalDate eventDate) { * if (eventDate.isBefore(LocalDate.now(clock)) { * ... * } * } * } * </pre> * * This approach allows an alternative clock, such as {@link #fixed(Instant, ZoneId) fixed} or * {@link #offset(Clock, Duration) offset} to be used during testing. * * <p>The {@code system} factory methods provide clocks based on the best available system clock. * This may use {@link System#currentTimeMillis()}, or a higher resolution clock if one is * available. * * @implSpec This abstract class must be implemented with care to ensure other classes operate * correctly. All implementations must be thread-safe - a single instance must be capable of be * invoked from multiple threads without negative consequences such as race conditions. * <p>The principal methods are defined to allow the throwing of an exception. In normal use, no * exceptions will be thrown, however one possible implementation would be to obtain the time * from a central time server across the network. 
Obviously, in this case the lookup could fail,
 * and so the method is permitted to throw an exception.
 * <p>The returned instants from {@code Clock} work on a time-scale that ignores leap seconds,
 * as described in {@link Instant}. If the implementation wraps a source that provides leap
 * second information, then a mechanism should be used to "smooth" the leap second. The Java
 * Time-Scale mandates the use of UTC-SLS, however clock implementations may choose how accurate
 * they are with the time-scale so long as they document how they work. Implementations are
 * therefore not required to actually perform the UTC-SLS slew or to otherwise be aware of leap
 * seconds.
 * <p>Implementations should implement {@code Serializable} wherever possible and must document
 * whether or not they do support serialization.
 * @since 1.8
 */
public abstract class Clock implements InstantSource {

    /**
     * Obtains a clock that returns the current instant using the best available
     * system clock, converting to date and time using the UTC time-zone.
     * <p>
     * This clock, rather than {@link #systemDefaultZone()}, should be used when
     * you need the current instant without the date or time.
     * <p>
     * This clock is based on the best available system clock.
     * This may use {@link System#currentTimeMillis()}, or a higher resolution
     * clock if one is available.
     * <p>
     * Conversion from instant to date or time uses the {@linkplain ZoneOffset#UTC UTC time-zone}.
     * <p>
     * The returned implementation is immutable, thread-safe and {@code Serializable}.
     * It is equivalent to {@code system(ZoneOffset.UTC)}.
     *
     * @return a clock that uses the best available system clock in the UTC zone, not null
     */
    public static Clock systemUTC() {
        // returns the shared UTC instance, so repeated calls allocate nothing
        return SystemClock.UTC;
    }

    /**
     * Obtains a clock that returns the current instant using the best available
     * system clock, converting to date and time using the default time-zone.
     * <p>
     * This clock is based on the best available system clock.
* This may use {@link System#currentTimeMillis()}, or a higher resolution * clock if one is available. * <p> * Using this method hard codes a dependency to the default time-zone into your application. * It is recommended to avoid this and use a specific time-zone whenever possible. * The {@link #systemUTC() UTC clock} should be used when you need the current instant * without the date or time. * <p> * The returned implementation is immutable, thread-safe and {@code Serializable}. * It is equivalent to {@code system(ZoneId.systemDefault())}. * * @return a clock that uses the best available system clock in the default zone, not null * @see ZoneId#systemDefault() */ public static Clock systemDefaultZone() { return new SystemClock(ZoneId.systemDefault()); } /** * Obtains a clock that returns the current instant using the best available * system clock. * <p> * This clock is based on the best available system clock. * This may use {@link System#currentTimeMillis()}, or a higher resolution * clock if one is available. * <p> * Conversion from instant to date or time uses the specified time-zone. * <p> * The returned implementation is immutable, thread-safe and {@code Serializable}. * * @param zone the time-zone to use to convert the instant to date-time, not null * @return a clock that uses the best available system clock in the specified zone, not null */ public static Clock system(ZoneId zone) { Objects.requireNonNull(zone, "zone"); if (zone == ZoneOffset.UTC) { return SystemClock.UTC; } return new SystemClock(zone); } //------------------------------------------------------------------------- /** * Obtains a clock that returns the current instant ticking in whole milliseconds * using the best available system clock. * <p> * This clock will always have the nano-of-second field truncated to milliseconds. * This ensures that the visible time ticks in whole milliseconds. 
 * The underlying clock is the best available system clock, equivalent to
 * using {@link #system(ZoneId)}.
 * <p>
 * Implementations may use a caching strategy for performance reasons.
 * As such, it is possible that the start of the millisecond observed via this
 * clock will be later than that observed directly via the underlying clock.
 * <p>
 * The returned implementation is immutable, thread-safe and {@code Serializable}.
 * It is equivalent to {@code tick(system(zone), Duration.ofMillis(1))}.
 *
 * @param zone the time-zone to use to convert the instant to date-time, not null
 * @return a clock that ticks in whole milliseconds using the specified zone, not null
 * @since 9
 */
public static Clock tickMillis(ZoneId zone) {
    // truncate the system clock to whole milliseconds
    return new TickClock(system(zone), NANOS_PER_MILLI);
}

//-------------------------------------------------------------------------
/**
 * Obtains a clock that returns the current instant ticking in whole seconds
 * using the best available system clock.
 * <p>
 * This clock will always have the nano-of-second field set to zero.
 * This ensures that the visible time ticks in whole seconds.
 * The underlying clock is the best available system clock, equivalent to
 * using {@link #system(ZoneId)}.
 * <p>
 * Implementations may use a caching strategy for performance reasons.
 * As such, it is possible that the start of the second observed via this
 * clock will be later than that observed directly via the underlying clock.
 * <p>
 * The returned implementation is immutable, thread-safe and {@code Serializable}.
 * It is equivalent to {@code tick(system(zone), Duration.ofSeconds(1))}.
 *
 * @param zone the time-zone to use to convert the instant to date-time, not null
 * @return a clock that ticks in whole seconds using the specified zone, not null
 */
public static Clock tickSeconds(ZoneId zone) {
    // truncate the system clock to whole seconds
    return new TickClock(system(zone), NANOS_PER_SECOND);
}

/**
 * Obtains a clock that returns the current instant ticking in whole minutes
 * using the best available system clock.
* <p> * This clock will always have the nano-of-second and second-of-minute fields set to zero. * This ensures that the visible time ticks in whole minutes. * The underlying clock is the best available system clock, equivalent to * using {@link #system(ZoneId)}. * <p> * Implementations may use a caching strategy for performance reasons. * As such, it is possible that the start of the minute observed via this * clock will be later than that observed directly via the underlying clock. * <p> * The returned implementation is immutable, thread-safe and {@code Serializable}. * It is equivalent to {@code tick(system(zone), Duration.ofMinutes(1))}. * * @param zone the time-zone to use to convert the instant to date-time, not null * @return a clock that ticks in whole minutes using the specified zone, not null */ public static Clock tickMinutes(ZoneId zone) { return new TickClock(system(zone), NANOS_PER_MINUTE); } /** * Obtains a clock that returns instants from the specified clock truncated * to the nearest occurrence of the specified duration. * <p> * This clock will only tick as per the specified duration. Thus, if the duration * is half a second, the clock will return instants truncated to the half second. * <p> * The tick duration must be positive. If it has a part smaller than a whole * millisecond, then the whole duration must divide into one second without * leaving a remainder. All normal tick durations will match these criteria, * including any multiple of hours, minutes, seconds and milliseconds, and * sensible nanosecond durations, such as 20ns, 250,000ns and 500,000ns. * <p> * A duration of zero or one nanosecond would have no truncation effect. * Passing one of these will return the underlying clock. * <p> * Implementations may use a caching strategy for performance reasons. * As such, it is possible that the start of the requested duration observed * via this clock will be later than that observed directly via the underlying clock. 
* <p> * The returned implementation is immutable, thread-safe and {@code Serializable} * providing that the base clock is. * * @param baseClock the base clock to base the ticking clock on, not null * @param tickDuration the duration of each visible tick, not negative, not null * @return a clock that ticks in whole units of the duration, not null * @throws IllegalArgumentException if the duration is negative, or has a * part smaller than a whole millisecond such that the whole duration is not * divisible into one second * @throws ArithmeticException if the duration is too large to be represented as nanos */ public static Clock tick(Clock baseClock, Duration tickDuration) { Objects.requireNonNull(baseClock, "baseClock"); Objects.requireNonNull(tickDuration, "tickDuration"); if (tickDuration.isNegative()) { throw new IllegalArgumentException("Tick duration must not be negative"); } long tickNanos = tickDuration.toNanos(); if (tickNanos % 1000_000 == 0) { // ok, no fraction of millisecond } else if (1000_000_000 % tickNanos == 0) { // ok, divides into one second without remainder } else { throw new IllegalArgumentException("Invalid tick duration"); } if (tickNanos <= 1) { return baseClock; } return new TickClock(baseClock, tickNanos); } //----------------------------------------------------------------------- /** * Obtains a clock that always returns the same instant. * <p> * This clock simply returns the specified instant. * As such, it is not a clock in the conventional sense. * The main use case for this is in testing, where the fixed clock ensures * tests are not dependent on the current clock. * <p> * The returned implementation is immutable, thread-safe and {@code Serializable}. 
 *
 * @param fixedInstant the instant to use as the clock, not null
 * @param zone the time-zone to use to convert the instant to date-time, not null
 * @return a clock that always returns the same instant, not null
 */
public static Clock fixed(Instant fixedInstant, ZoneId zone) {
    Objects.requireNonNull(fixedInstant, "fixedInstant");
    Objects.requireNonNull(zone, "zone");
    return new FixedClock(fixedInstant, zone);
}

// -------------------------------------------------------------------------
/**
 * Obtains a clock that returns instants from the specified clock with the specified duration
 * added.
 *
 * <p>This clock wraps another clock, returning instants that are later by the specified duration.
 * If the duration is negative, the instants will be earlier than the current date and time. The
 * main use case for this is to simulate running in the future or in the past.
 *
 * <p>A duration of zero would have no offsetting effect. Passing zero will return the underlying
 * clock.
 *
 * <p>The returned implementation is immutable, thread-safe and {@code Serializable} providing
 * that the base clock is.
 *
 * @param baseClock the base clock to add the duration to, not null
 * @param offsetDuration the duration to add, not null
 * @return a clock based on the base clock with the duration added, not null
 */
public static Clock offset(Clock baseClock, Duration offsetDuration) {
    Objects.requireNonNull(baseClock, "baseClock");
    Objects.requireNonNull(offsetDuration, "offsetDuration");
    // A zero offset would produce a no-op wrapper, so the base clock is
    // returned unchanged instead of allocating an OffsetClock.
    if (offsetDuration.equals(Duration.ZERO)) {
        return baseClock;
    }
    return new OffsetClock(baseClock, offsetDuration);
}

//-----------------------------------------------------------------------
/**
 * Constructor accessible by subclasses.
 */
protected Clock() {
}

//-----------------------------------------------------------------------
/**
 * Gets the time-zone being used to create dates and times.
 * <p>
 * A clock will typically obtain the current instant and then convert that
 * to a date or time using a time-zone.
 This method returns the time-zone used.
 *
 * @return the time-zone being used to interpret instants, not null
 */
public abstract ZoneId getZone();

/**
 * Returns a copy of this clock with a different time-zone.
 *
 * <p>A clock will typically obtain the current instant and then convert that to a date or time
 * using a time-zone. This method returns a clock with similar properties but using a different
 * time-zone.
 *
 * @param zone the time-zone to change to, not null
 * @return a clock based on this clock with the specified time-zone, not null
 */
@Override
public abstract Clock withZone(ZoneId zone);

// -------------------------------------------------------------------------
/**
 * Gets the current millisecond instant of the clock.
 *
 * <p>This returns the millisecond-based instant, measured from 1970-01-01T00:00Z (UTC). This is
 * equivalent to the definition of {@link System#currentTimeMillis()}.
 *
 * <p>Most applications should avoid this method and use {@link Instant} to represent an instant
 * on the time-line rather than a raw millisecond value. This method is provided to allow the use
 * of the clock in high performance use cases where the creation of an object would be
 * unacceptable.
 *
 * <p>The default implementation currently calls {@link #instant}.
 *
 * @return the current millisecond instant from this clock, measured from the Java epoch of
 *     1970-01-01T00:00Z (UTC), not null
 * @throws DateTimeException if the instant cannot be obtained, not thrown by most implementations
 */
@Override
public long millis() {
    return instant().toEpochMilli();
}

// -----------------------------------------------------------------------
/**
 * Gets the current instant of the clock.
 *
 * <p>This returns an instant representing the current instant as defined by the clock.
 *
 * @return the current instant from this clock, not null
 * @throws DateTimeException if the instant cannot be obtained, not thrown by most implementations
 */
@Override
public abstract Instant instant();

//-----------------------------------------------------------------------
/**
 * Checks if this clock is equal to another clock.
 * <p>
 * Clocks should override this method to compare equals based on
 * their state and to meet the contract of {@link Object#equals}.
 * If not overridden, the behavior is defined by {@link Object#equals}.
 *
 * @param obj the object to check, null returns false
 * @return true if this is equal to the other clock
 */
@Override
public boolean equals(Object obj) {
    return super.equals(obj);
}

/**
 * A hash code for this clock.
 * <p>
 * Clocks should override this method based on
 * their state and to meet the contract of {@link Object#hashCode}.
 * If not overridden, the behavior is defined by {@link Object#hashCode}.
 *
 * @return a suitable hash code
 */
@Override
public int hashCode() {
    return super.hashCode();
}

// -----------------------------------------------------------------------
// initial offset, in epoch seconds, taken once at class initialization
private static final long OFFSET_SEED = System.currentTimeMillis() / 1000 - 1024;

// We don't actually need a volatile here.
// We don't care if offset is set or read concurrently by multiple
// threads - we just need a value which is 'recent enough' - in other
// words something that has been updated at least once in the last
// 2^32 secs (~136 years). And even if we by chance see an invalid
// offset, the worst that can happen is that we will get a -1 value
// from getNanoTimeAdjustment, forcing us to update the offset
// once again.
private static long offset = OFFSET_SEED;

static Instant currentInstant() {
    // Take a local copy of offset. offset can be updated concurrently
    // by other threads (even if we haven't made it volatile) so we will
    // work with a local copy.
    long localOffset = offset;
    /* J2ObjC: removed
    long adjustment = VM.getNanoTimeAdjustment(localOffset);
    if (adjustment == -1) {
        // -1 is a sentinel value returned by VM.getNanoTimeAdjustment
        // when the offset it is given is too far off the current UTC
        // time. In principle, this should not happen unless the
        // JVM has run for more than ~136 years (not likely) or
        // someone is fiddling with the system time, or the offset is
        // by chance at 1ns in the future (very unlikely).
        // We can easily recover from all these conditions by bringing
        // back the offset in range and retry.

        // bring back the offset in range. We use -1024 to make
        // it more unlikely to hit the 1ns in the future condition.
        localOffset = System.currentTimeMillis() / 1000 - 1024;
        // retry
        adjustment = VM.getNanoTimeAdjustment(localOffset);
        if (adjustment == -1) {
            // Should not happen: we just recomputed a new offset.
            // It should have fixed the issue.
            throw new InternalError("Offset " + localOffset + " is not in range");
        } else {
            // OK - recovery succeeded. Update the offset for the
            // next call...
            offset = localOffset;
        }
    }
    */
    // NOTE(review): with the VM.getNanoTimeAdjustment call removed for J2ObjC,
    // 'offset' is never reassigned, so this always returns the constant instant
    // Instant.ofEpochSecond(OFFSET_SEED, 0) computed at class initialization -
    // it does not advance with wall-clock time. Confirm this is the intended
    // J2ObjC behavior for SystemInstantSource.instant()/SystemClock.instant().
    long adjustment = 0L;
    return Instant.ofEpochSecond(localOffset, adjustment);
}

// -----------------------------------------------------------------------
/**
 * An instant source that always returns the latest time from {@link System#currentTimeMillis()}
 * or equivalent.
 */
static final class SystemInstantSource implements InstantSource, Serializable {
    @java.io.Serial
    private static final long serialVersionUID = 3232399674412L;

    // this is a singleton, but the class is coded such that it is not a
    // problem if someone hacks around and creates another instance
    static final SystemInstantSource INSTANCE = new SystemInstantSource();

    SystemInstantSource() {}

    @Override
    public Clock withZone(ZoneId zone) {
        // Wraps this source in a Clock for the requested zone.
        return Clock.system(zone);
    }

    @Override
    public long millis() {
        // System.currentTimeMillis() and VM.getNanoTimeAdjustment(offset)
        // use the same time source - System.currentTimeMillis() simply
        // limits the resolution to milliseconds.
        // So we take the faster path and call System.currentTimeMillis()
        // directly - in order to avoid the performance penalty of
        // VM.getNanoTimeAdjustment(offset) which is less efficient.
        return System.currentTimeMillis();
    }

    @Override
    public Instant instant() {
        return currentInstant();
    }

    // The source is stateless, so all instances compare equal and share
    // one class-based hash code.
    @Override
    public boolean equals(Object obj) {
        return obj instanceof SystemInstantSource;
    }

    @Override
    public int hashCode() {
        return SystemInstantSource.class.hashCode();
    }

    @Override
    public String toString() {
        return "SystemInstantSource";
    }

    // Deserialization always resolves back to the singleton instance.
    @java.io.Serial
    private Object readResolve() throws ObjectStreamException {
        return SystemInstantSource.INSTANCE;
    }
}

// -----------------------------------------------------------------------
/**
 * Implementation of a clock that always returns the latest time from {@code
 * SystemInstantSource.INSTANCE}.
 */
static final class SystemClock extends Clock implements Serializable {
    @java.io.Serial
    private static final long serialVersionUID = 6740630888130243051L;

    // Shared instance for the common UTC case, reused by Clock.system(ZoneId).
    static final SystemClock UTC = new SystemClock(ZoneOffset.UTC);

    private final ZoneId zone;

    SystemClock(ZoneId zone) {
        this.zone = zone;
    }

    @Override
    public ZoneId getZone() {
        return zone;
    }

    @Override
    public Clock withZone(ZoneId zone) {
        if (zone.equals(this.zone)) {  // intentional NPE
            return this;
        }
        return new SystemClock(zone);
    }

    @Override
    public long millis() {
        // inline of SystemInstantSource.INSTANCE.millis()
        return System.currentTimeMillis();
    }

    @Override
    public Instant instant() {
        // inline of SystemInstantSource.INSTANCE.instant()
        return currentInstant();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof SystemClock) {
            return zone.equals(((SystemClock) obj).zone);
        }
        return false;
    }

    @Override
    public int hashCode() {
        // +1 keeps this clock's hash distinct from the zone's own hash code.
        return zone.hashCode() + 1;
    }

    @Override
    public String toString() {
        return "SystemClock[" + zone + "]";
    }
}

//-----------------------------------------------------------------------
/**
 * Implementation of a clock that always returns the same instant.
 * This is typically used for testing.
*/ static final class FixedClock extends Clock implements Serializable { @java.io.Serial private static final long serialVersionUID = 7430389292664866958L; private final Instant instant; private final ZoneId zone; FixedClock(Instant fixedInstant, ZoneId zone) { this.instant = fixedInstant; this.zone = zone; } @Override public ZoneId getZone() { return zone; } @Override public Clock withZone(ZoneId zone) { if (zone.equals(this.zone)) { // intentional NPE return this; } return new FixedClock(instant, zone); } @Override public long millis() { return instant.toEpochMilli(); } @Override public Instant instant() { return instant; } @Override public boolean equals(Object obj) { if (obj instanceof FixedClock) { FixedClock other = (FixedClock) obj; return instant.equals(other.instant) && zone.equals(other.zone); } return false; } @Override public int hashCode() { return instant.hashCode() ^ zone.hashCode(); } @Override public String toString() { return "FixedClock[" + instant + "," + zone + "]"; } } //----------------------------------------------------------------------- /** * Implementation of a clock that adds an offset to an underlying clock. 
 */
static final class OffsetClock extends Clock implements Serializable {
    @java.io.Serial
    private static final long serialVersionUID = 2007484719125426256L;

    @SuppressWarnings("serial") // Not statically typed as Serializable
    private final Clock baseClock;
    private final Duration offset;

    OffsetClock(Clock baseClock, Duration offset) {
        this.baseClock = baseClock;
        this.offset = offset;
    }

    @Override
    public ZoneId getZone() {
        return baseClock.getZone();
    }

    @Override
    public Clock withZone(ZoneId zone) {
        if (zone.equals(baseClock.getZone())) {  // intentional NPE
            return this;
        }
        return new OffsetClock(baseClock.withZone(zone), offset);
    }

    @Override
    public long millis() {
        // addExact surfaces overflow as ArithmeticException instead of wrapping.
        return Math.addExact(baseClock.millis(), offset.toMillis());
    }

    @Override
    public Instant instant() {
        return baseClock.instant().plus(offset);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof OffsetClock) {
            OffsetClock other = (OffsetClock) obj;
            return baseClock.equals(other.baseClock) && offset.equals(other.offset);
        }
        return false;
    }

    @Override
    public int hashCode() {
        return baseClock.hashCode() ^ offset.hashCode();
    }

    @Override
    public String toString() {
        return "OffsetClock[" + baseClock + "," + offset + "]";
    }
}

// -----------------------------------------------------------------------
/** Implementation of a clock that reduces the tick frequency of an underlying clock.
 */
static final class TickClock extends Clock implements Serializable {
    @java.io.Serial
    private static final long serialVersionUID = 6504659149906368850L;

    @SuppressWarnings("serial") // Not statically typed as Serializable
    private final Clock baseClock;
    // Tick size in nanoseconds; Clock.tick validates it as either a whole
    // number of milliseconds or an exact divisor of one second.
    private final long tickNanos;

    TickClock(Clock baseClock, long tickNanos) {
        this.baseClock = baseClock;
        this.tickNanos = tickNanos;
    }

    @Override
    public ZoneId getZone() {
        return baseClock.getZone();
    }

    @Override
    public Clock withZone(ZoneId zone) {
        if (zone.equals(baseClock.getZone())) {  // intentional NPE
            return this;
        }
        return new TickClock(baseClock.withZone(zone), tickNanos);
    }

    @Override
    public long millis() {
        // NOTE(review): for sub-millisecond ticks (tickNanos < 1_000_000, which
        // Clock.tick permits, e.g. 250_000ns) tickNanos / 1000_000L is zero and
        // Math.floorMod then throws ArithmeticException - confirm whether
        // millis() can ever be reached with such a tick.
        long millis = baseClock.millis();
        return millis - Math.floorMod(millis, tickNanos / 1000_000L);
    }

    @Override
    public Instant instant() {
        if ((tickNanos % 1000_000) == 0) {
            // Whole-millisecond ticks can be truncated entirely in the
            // millisecond domain, avoiding an Instant read.
            long millis = baseClock.millis();
            return Instant.ofEpochMilli(millis - Math.floorMod(millis, tickNanos / 1000_000L));
        }
        Instant instant = baseClock.instant();
        long nanos = instant.getNano();
        long adjust = Math.floorMod(nanos, tickNanos);
        return instant.minusNanos(adjust);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof TickClock) {
            TickClock other = (TickClock) obj;
            return baseClock.equals(other.baseClock) && tickNanos == other.tickNanos;
        }
        return false;
    }

    @Override
    public int hashCode() {
        // Folds both halves of the long tick value into the int hash.
        return baseClock.hashCode() ^ ((int) (tickNanos ^ (tickNanos >>> 32)));
    }

    @Override
    public String toString() {
        return "TickClock[" + baseClock + "," + Duration.ofNanos(tickNanos) + "]";
    }
}

// -----------------------------------------------------------------------
/** Implementation of a clock based on an {@code InstantSource}.
 */
static final class SourceClock extends Clock implements Serializable {
    @java.io.Serial
    private static final long serialVersionUID = 235386528762398L;

    @SuppressWarnings("serial") // Not statically typed as Serializable
    private final InstantSource baseSource;
    private final ZoneId zone;

    SourceClock(InstantSource baseSource, ZoneId zone) {
        this.baseSource = baseSource;
        this.zone = zone;
    }

    @Override
    public ZoneId getZone() {
        return zone;
    }

    @Override
    public Clock withZone(ZoneId zone) {
        if (zone.equals(this.zone)) {  // intentional NPE
            return this;
        }
        // The wrapped source is reused; only the zone changes.
        return new SourceClock(baseSource, zone);
    }

    @Override
    public long millis() {
        // All timing queries delegate to the wrapped InstantSource.
        return baseSource.millis();
    }

    @Override
    public Instant instant() {
        return baseSource.instant();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof SourceClock) {
            SourceClock other = (SourceClock) obj;
            return zone.equals(other.zone) && baseSource.equals(other.baseSource);
        }
        return false;
    }

    @Override
    public int hashCode() {
        return baseSource.hashCode() ^ zone.hashCode();
    }

    @Override
    public String toString() {
        return "SourceClock[" + baseSource + "," + zone + "]";
    }
}
}
apache/incubator-xtable
35,428
xtable-core/src/test/java/org/apache/xtable/hudi/ITHudiConversionSource.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.xtable.hudi; import static java.util.stream.Collectors.groupingBy; import static org.apache.xtable.testutil.ITTestUtils.validateTable; import static org.junit.jupiter.api.Assertions.*; import java.nio.file.Path; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import lombok.Builder; import lombok.Value; import org.apache.avro.Schema; import org.apache.hadoop.conf.Configuration; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.sql.SparkSession; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.apache.hudi.client.HoodieReadClient; import 
org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.model.HoodieAvroPayload; import org.apache.hudi.common.model.HoodieRecord; import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.timeline.HoodieInstant; import org.apache.xtable.GenericTable; import org.apache.xtable.TestJavaHudiTable; import org.apache.xtable.TestSparkHudiTable; import org.apache.xtable.ValidationTestHelper; import org.apache.xtable.model.CommitsBacklog; import org.apache.xtable.model.InstantsForIncrementalSync; import org.apache.xtable.model.InternalSnapshot; import org.apache.xtable.model.InternalTable; import org.apache.xtable.model.TableChange; import org.apache.xtable.model.schema.InternalField; import org.apache.xtable.model.schema.InternalSchema; import org.apache.xtable.model.schema.InternalType; import org.apache.xtable.model.storage.DataLayoutStrategy; import org.apache.xtable.model.storage.TableFormat; /** * A suite of functional tests that the extraction from Hudi to Intermediate representation works. 
 */
public class ITHudiConversionSource {
  @TempDir public static Path tempDir;
  // Spark context/session shared by all tests in this class, created once.
  private static JavaSparkContext jsc;
  private static SparkSession sparkSession;
  private static final Configuration CONFIGURATION = new Configuration();

  @BeforeAll
  public static void setupOnce() {
    SparkConf sparkConf = HudiTestUtil.getSparkConf(tempDir);
    sparkSession =
        SparkSession.builder().config(HoodieReadClient.addHoodieSupport(sparkConf)).getOrCreate();
    // Write lists with the modern Parquet structure rather than the legacy one.
    sparkSession
        .sparkContext()
        .hadoopConfiguration()
        .set("parquet.avro.write-old-list-structure", "false");
    jsc = JavaSparkContext.fromSparkContext(sparkSession.sparkContext());
  }

  @AfterAll
  public static void teardown() {
    if (jsc != null) {
      jsc.close();
    }
    if (sparkSession != null) {
      sparkSession.close();
    }
  }

  @Test
  void getCurrentTableTest() {
    String tableName = GenericTable.getTableName();
    Path basePath = tempDir.resolve(tableName);
    HudiTestUtil.PartitionConfig partitionConfig = HudiTestUtil.PartitionConfig.of(null, null);
    // Two-field Avro schema; the expected internal schema below adds the five
    // Hudi metadata columns in front of these fields.
    Schema schema =
        Schema.createRecord(
            "testCurrentTable",
            null,
            "hudi",
            false,
            Arrays.asList(
                new Schema.Field("field1", Schema.create(Schema.Type.STRING)),
                new Schema.Field("field2", Schema.create(Schema.Type.STRING))));
    try (TestJavaHudiTable table =
        TestJavaHudiTable.withSchema(
            tableName,
            tempDir,
            HudiTestUtil.PartitionConfig.of(null, null).getHudiConfig(),
            HoodieTableType.MERGE_ON_READ,
            schema)) {
      table.insertRecords(5, Collections.emptyList(), false);
      HudiConversionSource hudiClient =
          getHudiSourceClient(
              CONFIGURATION, table.getBasePath(), partitionConfig.getXTableConfig());
      InternalTable internalTable = hudiClient.getCurrentTable();
      InternalSchema internalSchema =
          InternalSchema.builder()
              .name("testCurrentTable")
              .dataType(InternalType.RECORD)
              .isNullable(false)
              .fields(
                  Arrays.asList(
                      InternalField.builder()
                          .name("_hoodie_commit_time")
                          .schema(
                              InternalSchema.builder()
                                  .name("string")
                                  .dataType(InternalType.STRING)
                                  .isNullable(true)
                                  .build())
                          .defaultValue(InternalField.Constants.NULL_DEFAULT_VALUE)
                          .build(),
                      InternalField.builder()
                          .name("_hoodie_commit_seqno")
                          .schema(
                              InternalSchema.builder()
                                  .name("string")
                                  .dataType(InternalType.STRING)
                                  .isNullable(true)
                                  .build())
                          .defaultValue(InternalField.Constants.NULL_DEFAULT_VALUE)
                          .build(),
                      InternalField.builder()
                          .name("_hoodie_record_key")
                          .schema(
                              InternalSchema.builder()
                                  .name("string")
                                  .dataType(InternalType.STRING)
                                  .isNullable(true)
                                  .build())
                          .defaultValue(InternalField.Constants.NULL_DEFAULT_VALUE)
                          .build(),
                      InternalField.builder()
                          .name("_hoodie_partition_path")
                          .schema(
                              InternalSchema.builder()
                                  .name("string")
                                  .dataType(InternalType.STRING)
                                  .isNullable(true)
                                  .build())
                          .defaultValue(InternalField.Constants.NULL_DEFAULT_VALUE)
                          .build(),
                      InternalField.builder()
                          .name("_hoodie_file_name")
                          .schema(
                              InternalSchema.builder()
                                  .name("string")
                                  .dataType(InternalType.STRING)
                                  .isNullable(true)
                                  .build())
                          .defaultValue(InternalField.Constants.NULL_DEFAULT_VALUE)
                          .build(),
                      InternalField.builder()
                          .name("field1")
                          .schema(
                              InternalSchema.builder()
                                  .name("string")
                                  .dataType(InternalType.STRING)
                                  .isNullable(false)
                                  .build())
                          .defaultValue(null)
                          .build(),
                      InternalField.builder()
                          .name("field2")
                          .schema(
                              InternalSchema.builder()
                                  .name("string")
                                  .dataType(InternalType.STRING)
                                  .isNullable(false)
                                  .build())
                          .defaultValue(null)
                          .build()))
              // NOTE(review): the expected record key field list holds a single
              // null entry - verify this matches what the source reports for a
              // table created without explicit record key fields.
              .recordKeyFields(Collections.singletonList(null))
              .build();
      // NOTE(review): expected base path carries a "_v1" suffix - confirm it
      // matches TestJavaHudiTable.withSchema's directory layout.
      validateTable(
          internalTable,
          tableName,
          TableFormat.HUDI,
          internalSchema,
          DataLayoutStrategy.FLAT,
          "file:" + basePath + "_v1",
          internalTable.getLatestMetadataPath(),
          Collections.emptyList());
    }
  }

  @ParameterizedTest
  @MethodSource("testsForAllTableTypesAndPartitions")
  public void insertAndUpsertData(
      HoodieTableType tableType, HudiTestUtil.PartitionConfig partitionConfig) {
    String tableName = GenericTable.getTableName();
    try (TestJavaHudiTable table =
        TestJavaHudiTable.forStandardSchema(
            tableName, tempDir, partitionConfig.getHudiConfig(), tableType)) {
      // Latest base file paths captured after each commit; used to validate
      // the snapshot and the per-commit table changes below.
      List<List<String>> allBaseFilePaths = new ArrayList<>();
      List<TableChange>
allTableChanges = new ArrayList<>();

      String commitInstant1 = table.startCommit();
      List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1;
      // Partitioned tables need a partition value ("INFO"/"WARN" level) for
      // generated records; unpartitioned tables do not.
      if (partitionConfig.getHudiConfig() != null) {
        insertsForCommit1 = table.generateRecords(100, "INFO");
      } else {
        insertsForCommit1 = table.generateRecords(100);
      }
      table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      if (partitionConfig.getHudiConfig() != null) {
        table.insertRecords(100, "WARN", true);
      } else {
        table.insertRecords(100, true);
      }
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      table.upsertRecords(insertsForCommit1.subList(0, 20), true);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      if (tableType == HoodieTableType.MERGE_ON_READ) {
        table.compact();
        allBaseFilePaths.add(table.getAllLatestBaseFilePaths());
      }

      HudiConversionSource hudiClient =
          getHudiSourceClient(
              CONFIGURATION, table.getBasePath(), partitionConfig.getXTableConfig());
      // Get the current snapshot
      InternalSnapshot internalSnapshot = hudiClient.getCurrentSnapshot();
      ValidationTestHelper.validateSnapshot(
          internalSnapshot, allBaseFilePaths.get(allBaseFilePaths.size() - 1));
      // Get second change in Incremental format.
      InstantsForIncrementalSync instantsForIncrementalSync =
          InstantsForIncrementalSync.builder()
              .lastSyncInstant(HudiInstantUtils.parseFromInstantTime(commitInstant1))
              .build();
      CommitsBacklog<HoodieInstant> instantCommitsBacklog =
          hudiClient.getCommitsBacklog(instantsForIncrementalSync);
      for (HoodieInstant instant : instantCommitsBacklog.getCommitsToProcess()) {
        TableChange tableChange = hudiClient.getTableChangeForCommit(instant);
        allTableChanges.add(tableChange);
      }
      ValidationTestHelper.validateTableChanges(allBaseFilePaths, allTableChanges);
    }
  }

  @Test
  public void testOnlyUpsertsAfterInserts() {
    HoodieTableType tableType = HoodieTableType.MERGE_ON_READ;
    HudiTestUtil.PartitionConfig partitionConfig = HudiTestUtil.PartitionConfig.of(null, null);
    String tableName = "test_table_" + UUID.randomUUID();
    try (TestJavaHudiTable table =
        TestJavaHudiTable.forStandardSchema(
            tableName, tempDir, partitionConfig.getHudiConfig(), tableType)) {
      List<List<String>> allBaseFilePaths = new ArrayList<>();
      List<TableChange> allTableChanges = new ArrayList<>();

      String commitInstant1 = table.startCommit();
      List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1;
      if (partitionConfig.getHudiConfig() != null) {
        insertsForCommit1 = table.generateRecords(100, "INFO");
      } else {
        insertsForCommit1 = table.generateRecords(100);
      }
      table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      // Upsert and delete ranges overlap (0-20 and 15-30) deliberately, so the
      // delete touches both upserted and untouched records.
      table.upsertRecords(insertsForCommit1.subList(0, 20), true);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());
      table.deleteRecords(insertsForCommit1.subList(15, 30), true);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      HudiConversionSource hudiClient =
          getHudiSourceClient(
              CONFIGURATION, table.getBasePath(), partitionConfig.getXTableConfig());
      // Get the current snapshot
      InternalSnapshot internalSnapshot = hudiClient.getCurrentSnapshot();
      ValidationTestHelper.validateSnapshot(
          internalSnapshot,
allBaseFilePaths.get(allBaseFilePaths.size() - 1));
      // Get second change in Incremental format.
      InstantsForIncrementalSync instantsForIncrementalSync =
          InstantsForIncrementalSync.builder()
              .lastSyncInstant(HudiInstantUtils.parseFromInstantTime(commitInstant1))
              .build();
      CommitsBacklog<HoodieInstant> instantCommitsBacklog =
          hudiClient.getCommitsBacklog(instantsForIncrementalSync);
      for (HoodieInstant instant : instantCommitsBacklog.getCommitsToProcess()) {
        TableChange tableChange = hudiClient.getTableChangeForCommit(instant);
        allTableChanges.add(tableChange);
      }
      ValidationTestHelper.validateTableChanges(allBaseFilePaths, allTableChanges);
    }
  }

  @Test
  public void testForIncrementalSyncSafetyCheck() {
    HoodieTableType tableType = HoodieTableType.COPY_ON_WRITE;
    HudiTestUtil.PartitionConfig partitionConfig = HudiTestUtil.PartitionConfig.of(null, null);
    String tableName = GenericTable.getTableName();
    try (TestJavaHudiTable table =
        TestJavaHudiTable.forStandardSchema(
            tableName, tempDir, partitionConfig.getHudiConfig(), tableType)) {
      String commitInstant1 = table.startCommit();
      List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1 = table.generateRecords(100);
      table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true);

      table.upsertRecords(insertsForCommit1.subList(30, 40), true);

      String commitInstant2 = table.startCommit();
      List<HoodieRecord<HoodieAvroPayload>> insertsForCommit2 = table.generateRecords(100);
      table.insertRecordsWithCommitAlreadyStarted(insertsForCommit2, commitInstant2, true);

      table.clean(); // cleans up file groups from commitInstant1

      HudiConversionSource hudiClient =
          getHudiSourceClient(
              CONFIGURATION, table.getBasePath(), partitionConfig.getXTableConfig());
      // commitInstant1 is not safe for incremental sync as cleaner has run after and touched
      // related files.
      assertFalse(
          hudiClient.isIncrementalSyncSafeFrom(
              HudiInstantUtils.parseFromInstantTime(commitInstant1)));
      // commitInstant2 is safe for incremental sync as cleaner has no affect on data written in
      // this commit.
      assertTrue(
          hudiClient.isIncrementalSyncSafeFrom(
              HudiInstantUtils.parseFromInstantTime(commitInstant2)));
      // commit older by an hour is not present in table, hence not safe for incremental sync.
      Instant instantAsOfHourAgo = Instant.now().minus(1, ChronoUnit.HOURS);
      assertFalse(hudiClient.isIncrementalSyncSafeFrom(instantAsOfHourAgo));
    }
  }

  @ParameterizedTest
  @MethodSource("testsForAllTableTypes")
  public void testsForDropPartition(HoodieTableType tableType) {
    String tableName = "test_table_" + UUID.randomUUID();
    try (TestSparkHudiTable table =
        TestSparkHudiTable.forStandardSchema(tableName, tempDir, jsc, "level:SIMPLE", tableType)) {
      List<List<String>> allBaseFilePaths = new ArrayList<>();
      List<TableChange> allTableChanges = new ArrayList<>();

      String commitInstant1 = table.startCommit();
      List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1 = table.generateRecords(100);
      table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      table.insertRecords(100, true);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      // Deterministically pick the lexicographically-first partition to drop.
      Map<String, List<HoodieRecord>> recordsByPartition =
          insertsForCommit1.stream().collect(groupingBy(HoodieRecord::getPartitionPath));
      String partitionToDelete = recordsByPartition.keySet().stream().sorted().findFirst().get();

      table.deletePartition(partitionToDelete, tableType);
      allBaseFilePaths.add(table.getAllLatestBaseFilePaths());

      // Insert few records for deleted partition again to make it interesting.
table.insertRecords(20, partitionToDelete, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); HudiConversionSource hudiClient = getHudiSourceClient(CONFIGURATION, table.getBasePath(), "level:VALUE"); // Get the current snapshot InternalSnapshot internalSnapshot = hudiClient.getCurrentSnapshot(); ValidationTestHelper.validateSnapshot( internalSnapshot, allBaseFilePaths.get(allBaseFilePaths.size() - 1)); // Get changes in Incremental format. InstantsForIncrementalSync instantsForIncrementalSync = InstantsForIncrementalSync.builder() .lastSyncInstant(HudiInstantUtils.parseFromInstantTime(commitInstant1)) .build(); CommitsBacklog<HoodieInstant> instantCommitsBacklog = hudiClient.getCommitsBacklog(instantsForIncrementalSync); for (HoodieInstant instant : instantCommitsBacklog.getCommitsToProcess()) { TableChange tableChange = hudiClient.getTableChangeForCommit(instant); allTableChanges.add(tableChange); } ValidationTestHelper.validateTableChanges(allBaseFilePaths, allTableChanges); } } @ParameterizedTest @MethodSource("testsForAllTableTypes") public void testsForDeleteAllRecordsInPartition(HoodieTableType tableType) { String tableName = "test_table_" + UUID.randomUUID(); try (TestSparkHudiTable table = TestSparkHudiTable.forStandardSchema(tableName, tempDir, jsc, "level:SIMPLE", tableType)) { List<List<String>> allBaseFilePaths = new ArrayList<>(); List<TableChange> allTableChanges = new ArrayList<>(); String commitInstant1 = table.startCommit(); List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1 = table.generateRecords(100); table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); table.insertRecords(100, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); Map<String, List<HoodieRecord<HoodieAvroPayload>>> recordsByPartition = insertsForCommit1.stream().collect(groupingBy(HoodieRecord::getPartitionPath)); String selectedPartition = 
recordsByPartition.keySet().stream().sorted().findAny().get(); table.deleteRecords(recordsByPartition.get(selectedPartition), true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); if (tableType == HoodieTableType.MERGE_ON_READ) { table.compact(); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); } // Insert few records for deleted partition again to make it interesting. table.insertRecords(20, selectedPartition, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); HudiConversionSource hudiClient = getHudiSourceClient(CONFIGURATION, table.getBasePath(), "level:VALUE"); // Get the current snapshot InternalSnapshot internalSnapshot = hudiClient.getCurrentSnapshot(); ValidationTestHelper.validateSnapshot( internalSnapshot, allBaseFilePaths.get(allBaseFilePaths.size() - 1)); // Get changes in Incremental format. InstantsForIncrementalSync instantsForIncrementalSync = InstantsForIncrementalSync.builder() .lastSyncInstant(HudiInstantUtils.parseFromInstantTime(commitInstant1)) .build(); CommitsBacklog<HoodieInstant> instantCommitsBacklog = hudiClient.getCommitsBacklog(instantsForIncrementalSync); for (HoodieInstant instant : instantCommitsBacklog.getCommitsToProcess()) { TableChange tableChange = hudiClient.getTableChangeForCommit(instant); allTableChanges.add(tableChange); } ValidationTestHelper.validateTableChanges(allBaseFilePaths, allTableChanges); } } @ParameterizedTest @MethodSource("testsForAllTableTypesAndPartitions") public void testsForClustering( HoodieTableType tableType, HudiTestUtil.PartitionConfig partitionConfig) { String tableName = "test_table_" + UUID.randomUUID(); try (TestJavaHudiTable table = TestJavaHudiTable.forStandardSchema( tableName, tempDir, partitionConfig.getHudiConfig(), tableType)) { List<List<String>> allBaseFilePaths = new ArrayList<>(); List<TableChange> allTableChanges = new ArrayList<>(); /* * Insert 100 records. * Insert 100 records. * Upsert 20 records from first commit. * Compact for MOR table. 
* Insert 100 records. * Run Clustering. * Insert 100 records. */ String commitInstant1 = table.startCommit(); List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1 = table.generateRecords(100); table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); table.insertRecords(100, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); table.upsertRecords(insertsForCommit1.subList(0, 20), true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); if (tableType == HoodieTableType.MERGE_ON_READ) { table.compact(); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); } table.insertRecords(100, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); table.cluster(); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); table.insertRecords(100, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); HudiConversionSource hudiClient = getHudiSourceClient( CONFIGURATION, table.getBasePath(), partitionConfig.getXTableConfig()); // Get the current snapshot InternalSnapshot internalSnapshot = hudiClient.getCurrentSnapshot(); ValidationTestHelper.validateSnapshot( internalSnapshot, allBaseFilePaths.get(allBaseFilePaths.size() - 1)); // Get changes in Incremental format. 
InstantsForIncrementalSync instantsForIncrementalSync = InstantsForIncrementalSync.builder() .lastSyncInstant(HudiInstantUtils.parseFromInstantTime(commitInstant1)) .build(); CommitsBacklog<HoodieInstant> instantCommitsBacklog = hudiClient.getCommitsBacklog(instantsForIncrementalSync); for (HoodieInstant instant : instantCommitsBacklog.getCommitsToProcess()) { TableChange tableChange = hudiClient.getTableChangeForCommit(instant); allTableChanges.add(tableChange); } ValidationTestHelper.validateTableChanges(allBaseFilePaths, allTableChanges); } } @ParameterizedTest @MethodSource("testsForAllTableTypesAndPartitions") public void testsForSavepointRestore( HoodieTableType tableType, HudiTestUtil.PartitionConfig partitionConfig) { String tableName = "test_table_" + UUID.randomUUID(); try (TestJavaHudiTable table = TestJavaHudiTable.forStandardSchema( tableName, tempDir, partitionConfig.getHudiConfig(), tableType)) { List<List<String>> allBaseFilePaths = new ArrayList<>(); List<TableChange> allTableChanges = new ArrayList<>(); String commitInstant1 = table.startCommit(); List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1 = table.generateRecords(50); table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); // This is the commit we're going to savepoint and restore to table.insertRecords(50, true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); List<HoodieRecord<HoodieAvroPayload>> recordList = table.insertRecords(50, true); Set<String> baseFilePaths = new HashSet<>(table.getAllLatestBaseFilePaths()); table.upsertRecords(recordList.subList(0, 20), true); baseFilePaths.addAll(table.getAllLatestBaseFilePaths()); // Note that restore removes all the new base files added by these two commits allBaseFilePaths.add(new ArrayList<>(baseFilePaths)); table.savepointRestoreFromNthMostRecentInstant(2); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); table.insertRecords(50, 
true); allBaseFilePaths.add(table.getAllLatestBaseFilePaths()); HudiConversionSource hudiClient = getHudiSourceClient( CONFIGURATION, table.getBasePath(), partitionConfig.getXTableConfig()); // Get the current snapshot InternalSnapshot internalSnapshot = hudiClient.getCurrentSnapshot(); ValidationTestHelper.validateSnapshot( internalSnapshot, allBaseFilePaths.get(allBaseFilePaths.size() - 1)); // Get changes in Incremental format. InstantsForIncrementalSync instantsForIncrementalSync = InstantsForIncrementalSync.builder() .lastSyncInstant(HudiInstantUtils.parseFromInstantTime(commitInstant1)) .build(); CommitsBacklog<HoodieInstant> instantCommitsBacklog = hudiClient.getCommitsBacklog(instantsForIncrementalSync); for (HoodieInstant instant : instantCommitsBacklog.getCommitsToProcess()) { TableChange tableChange = hudiClient.getTableChangeForCommit(instant); allTableChanges.add(tableChange); } IntStream.range(0, allTableChanges.size() - 1) .forEach( i -> { if (i == 1) { // Savepoint: no change ValidationTestHelper.validateTableChange( allBaseFilePaths.get(i), allBaseFilePaths.get(i), allTableChanges.get(i)); } else { ValidationTestHelper.validateTableChange( allBaseFilePaths.get(i), allBaseFilePaths.get(i + 1), allTableChanges.get(i)); } }); } } @ParameterizedTest @MethodSource("testsForAllTableTypesAndPartitions") public void testsForRollbacks( HoodieTableType tableType, HudiTestUtil.PartitionConfig partitionConfig) { String tableName = "test_table_" + UUID.randomUUID(); try (TestJavaHudiTable table = TestJavaHudiTable.forStandardSchema( tableName, tempDir, partitionConfig.getHudiConfig(), tableType)) { String commitInstant1 = table.startCommit(); List<HoodieRecord<HoodieAvroPayload>> insertsForCommit1 = table.generateRecords(50); table.insertRecordsWithCommitAlreadyStarted(insertsForCommit1, commitInstant1, true); List<String> baseFilesAfterCommit1 = table.getAllLatestBaseFilePaths(); String commitInstant2 = table.startCommit(); 
List<HoodieRecord<HoodieAvroPayload>> insertsForCommit2 = table.generateRecords(50); table.insertRecordsWithCommitAlreadyStarted(insertsForCommit2, commitInstant2, true); List<String> baseFilesAfterCommit2 = table.getAllLatestBaseFilePaths(); String commitInstant3 = table.startCommit(); List<HoodieRecord<HoodieAvroPayload>> insertsForCommit3 = table.generateRecords(50); table.insertRecordsWithCommitAlreadyStarted(insertsForCommit3, commitInstant3, true); List<String> baseFilesAfterCommit3 = table.getAllLatestBaseFilePaths(); table.rollback(commitInstant3); List<String> baseFilesAfterRollback = table.getAllLatestBaseFilePaths(); String commitInstant4 = table.startCommit(); List<HoodieRecord<HoodieAvroPayload>> insertsForCommit4 = table.generateRecords(50); table.insertRecordsWithCommitAlreadyStarted(insertsForCommit4, commitInstant4, true); List<String> baseFilesAfterCommit4 = table.getAllLatestBaseFilePaths(); HudiConversionSource hudiClient = getHudiSourceClient( CONFIGURATION, table.getBasePath(), partitionConfig.getXTableConfig()); // Get the current snapshot InternalSnapshot internalSnapshot = hudiClient.getCurrentSnapshot(); ValidationTestHelper.validateSnapshot(internalSnapshot, baseFilesAfterCommit4); // Get changes in Incremental format. 
InstantsForIncrementalSync instantsForIncrementalSync = InstantsForIncrementalSync.builder() .lastSyncInstant(HudiInstantUtils.parseFromInstantTime(commitInstant1)) .build(); CommitsBacklog<HoodieInstant> instantCommitsBacklog = hudiClient.getCommitsBacklog(instantsForIncrementalSync); for (HoodieInstant instant : instantCommitsBacklog.getCommitsToProcess()) { TableChange tableChange = hudiClient.getTableChangeForCommit(instant); if (commitInstant2.equals(instant.getTimestamp())) { ValidationTestHelper.validateTableChange( baseFilesAfterCommit1, baseFilesAfterCommit2, tableChange); } else if ("rollback".equals(instant.getAction())) { ValidationTestHelper.validateTableChange( baseFilesAfterCommit3, baseFilesAfterRollback, tableChange); } else if (commitInstant4.equals(instant.getTimestamp())) { ValidationTestHelper.validateTableChange( baseFilesAfterRollback, baseFilesAfterCommit4, tableChange); } else { fail("Please add proper asserts here"); } } } } private static Stream<Arguments> testsForAllTableTypes() { return Stream.of( Arguments.of(HoodieTableType.COPY_ON_WRITE), Arguments.of(HoodieTableType.MERGE_ON_READ)); } private static Stream<Arguments> testsForAllTableTypesAndPartitions() { HudiTestUtil.PartitionConfig unPartitionedConfig = HudiTestUtil.PartitionConfig.of(null, null); HudiTestUtil.PartitionConfig partitionedConfig = HudiTestUtil.PartitionConfig.of("level:SIMPLE", "level:VALUE"); List<HudiTestUtil.PartitionConfig> partitionConfigs = Arrays.asList(unPartitionedConfig, partitionedConfig); List<HoodieTableType> tableTypes = Arrays.asList(HoodieTableType.COPY_ON_WRITE, HoodieTableType.MERGE_ON_READ); return tableTypes.stream() .flatMap( tableType -> partitionConfigs.stream().map(config -> Arguments.of(tableType, config))); } private HudiConversionSource getHudiSourceClient( Configuration conf, String basePath, String xTablePartitionConfig) { HoodieTableMetaClient hoodieTableMetaClient = HoodieTableMetaClient.builder() .setConf(conf) .setBasePath(basePath) 
.setLoadActiveTimelineOnLoad(true) .build(); PathBasedPartitionSpecExtractor partitionSpecExtractor = new ConfigurationBasedPartitionSpecExtractor( HudiSourceConfig.parsePartitionFieldSpecs(xTablePartitionConfig)); return new HudiConversionSource(hoodieTableMetaClient, partitionSpecExtractor); } private boolean checkIfNewFileGroupIsAdded(String activePath, TableChange tableChange) { String activePathFileGroupId = getFileGroupInfo(activePath).getFileId(); String activePathCommitTime = getFileGroupInfo(activePath).getCommitTime(); Map<String, String> fileIdToCommitTimeMap = tableChange.getFilesDiff().getFilesAdded().stream() .collect( Collectors.groupingBy( oneDf -> getFileGroupInfo(oneDf.getPhysicalPath()).getFileId(), Collectors.collectingAndThen( Collectors.mapping( oneDf -> getFileGroupInfo(oneDf.getPhysicalPath()).getCommitTime(), Collectors.toList()), list -> { if (list.size() > 1) { throw new IllegalStateException( "Some fileIds have more than one commit time."); } return list.get(0); }))); if (!fileIdToCommitTimeMap.containsKey(activePathFileGroupId)) { return false; } Instant newCommitInstant = HudiInstantUtils.parseFromInstantTime(fileIdToCommitTimeMap.get(activePathFileGroupId)); Instant oldCommitInstant = HudiInstantUtils.parseFromInstantTime(activePathCommitTime); return newCommitInstant.isAfter(oldCommitInstant); } private boolean checkIfFileIsRemoved(String activePath, TableChange tableChange) { String activePathFileGroupId = getFileGroupInfo(activePath).getFileId(); String activePathCommitTime = getFileGroupInfo(activePath).getCommitTime(); Map<String, String> fileIdToCommitTimeMap = tableChange.getFilesDiff().getFilesRemoved().stream() .collect( Collectors.groupingBy( oneDf -> getFileGroupInfo(oneDf.getPhysicalPath()).getFileId(), Collectors.collectingAndThen( Collectors.mapping( oneDf -> getFileGroupInfo(oneDf.getPhysicalPath()).getCommitTime(), Collectors.toList()), list -> { if (list.size() > 1) { throw new IllegalStateException( "Some fileIds 
have more than one commit time."); } return list.get(0); }))); if (!fileIdToCommitTimeMap.containsKey(activePathFileGroupId)) { return false; } if (!fileIdToCommitTimeMap.get(activePathFileGroupId).equals(activePathCommitTime)) { return false; } return true; } private FileGroupInfo getFileGroupInfo(String path) { String[] pathParts = path.split("/"); String fileName = pathParts[pathParts.length - 1]; return FileGroupInfo.builder() .fileId(FSUtils.getFileId(fileName)) .commitTime(FSUtils.getCommitTime(fileName)) .build(); } @Builder @Value private static class FileGroupInfo { String fileId; String commitTime; } }
apache/zeppelin
34,624
jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.zeppelin.jdbc; import org.apache.zeppelin.completer.CompletionType; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterException; import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.interpreter.InterpreterOutput; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterResultMessage; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.scheduler.FIFOScheduler; import org.apache.zeppelin.scheduler.ParallelScheduler; import org.apache.zeppelin.scheduler.Scheduler; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.user.UserCredentials; import org.apache.zeppelin.user.UsernamePassword; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.Statement; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import 
java.util.concurrent.TimeoutException; import com.mockrunner.jdbc.BasicJDBCTestCaseAdapter; import net.jodah.concurrentunit.Waiter; import static java.lang.String.format; import static org.apache.zeppelin.jdbc.JDBCInterpreter.COMMON_MAX_LINE; import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_DRIVER; import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_PASSWORD; import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_PRECODE; import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_STATEMENT_PRECODE; import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_URL; import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_USER; import static org.apache.zeppelin.jdbc.JDBCInterpreter.PRECODE_KEY_TEMPLATE; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.assertEquals; /** * JDBC interpreter unit tests. 
*/ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter { static String jdbcConnection; InterpreterContext context; private static String getJdbcConnection() throws IOException { if (null == jdbcConnection) { Path tmpDir = Files.createTempDirectory("h2-test-"); tmpDir.toFile().deleteOnExit(); jdbcConnection = format("jdbc:h2:%s", tmpDir); } return jdbcConnection; } public static Properties getJDBCTestProperties() { Properties p = new Properties(); p.setProperty("default.driver", "org.postgresql.Driver"); p.setProperty("default.url", "jdbc:postgresql://localhost:5432/"); p.setProperty("default.user", "gpadmin"); p.setProperty("default.password", ""); p.setProperty("common.max_count", "1000"); return p; } @Override @BeforeEach public void setUp() throws Exception { Class.forName("org.h2.Driver"); Connection connection = DriverManager.getConnection(getJdbcConnection()); Statement statement = connection.createStatement(); statement.execute( "DROP TABLE IF EXISTS test_table; " + "CREATE TABLE test_table(id varchar(255), name varchar(255));"); statement.execute( "CREATE USER IF NOT EXISTS dbuser PASSWORD 'dbpassword';" + "CREATE USER IF NOT EXISTS user1Id PASSWORD 'user1Pw';" + "CREATE USER IF NOT EXISTS user2Id PASSWORD 'user2Pw';" ); PreparedStatement insertStatement = connection.prepareStatement( "insert into test_table(id, name) values ('a', 'a_name'),('b', 'b_name'),('c', ?);"); insertStatement.setString(1, null); insertStatement.execute(); context = InterpreterContext.builder() .setAuthenticationInfo(new AuthenticationInfo("testUser")) .setParagraphId("paragraphId") .setInterpreterOut(new InterpreterOutput()) .build(); } @Test void testForParsePropertyKey() { JDBCInterpreter t = new JDBCInterpreter(new Properties()); Map<String, String> localProperties = new HashMap<>(); InterpreterContext interpreterContext = InterpreterContext.builder() .setLocalProperties(localProperties) .build(); assertEquals(JDBCInterpreter.DEFAULT_KEY, 
t.getDBPrefix(interpreterContext)); localProperties = new HashMap<>(); localProperties.put("db", "mysql"); interpreterContext = InterpreterContext.builder() .setLocalProperties(localProperties) .build(); assertEquals("mysql", t.getDBPrefix(interpreterContext)); localProperties = new HashMap<>(); localProperties.put("hive", "hive"); interpreterContext = InterpreterContext.builder() .setLocalProperties(localProperties) .build(); assertEquals("hive", t.getDBPrefix(interpreterContext)); } /** * DBprefix like %jdbc(db=mysql) or %jdbc(mysql) is not supported anymore * JDBC Interpreter would try to use default config. */ @Test void testDBPrefixProhibited() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "select * from test_table"; Map<String, String> localProperties = new HashMap<>(); localProperties.put("db", "fake"); InterpreterContext context = InterpreterContext.builder() .setAuthenticationInfo(new AuthenticationInfo("testUser")) .setLocalProperties(localProperties) .setParagraphId("paragraphId") .setInterpreterOut(new InterpreterOutput()) .build(); InterpreterResult interpreterResult = t.interpret(sqlQuery, context); // The result should be the same as that run with default config assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals("ID\tNAME\na\ta_name\nb\tb_name\nc\tnull\n", resultMessages.get(0).getData()); } @Test void testDefaultProperties() { JDBCInterpreter jdbcInterpreter = new 
JDBCInterpreter(getJDBCTestProperties()); assertEquals("org.postgresql.Driver", jdbcInterpreter.getProperty(DEFAULT_DRIVER)); assertEquals("jdbc:postgresql://localhost:5432/", jdbcInterpreter.getProperty(DEFAULT_URL)); assertEquals("gpadmin", jdbcInterpreter.getProperty(DEFAULT_USER)); assertEquals("", jdbcInterpreter.getProperty(DEFAULT_PASSWORD)); assertEquals("1000", jdbcInterpreter.getProperty(COMMON_MAX_LINE)); } @Test void testSelectQuery() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "select * from test_table WHERE ID in ('a', 'b'); "; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n", resultMessages.get(0).getData()); context = getInterpreterContext(); context.getLocalProperties().put("limit", "1"); interpreterResult = t.interpret(sqlQuery, context); resultMessages = context.out.toInterpreterResultMessage(); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("ID\tNAME\na\ta_name\n", resultMessages.get(0).getData()); } @Test void testSelectWithRefresh() throws IOException, InterruptedException, TimeoutException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); 
properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); final Waiter waiter = new Waiter(); context.getLocalProperties().put("refreshInterval", "1000"); Thread thread = new Thread(() -> { String sqlQuery = "select * from test_table WHERE ID in ('a', 'b');"; try { InterpreterResult interpreterResult = t.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code()); } catch (InterpreterException e) { fail("Should not be here"); } waiter.resume(); }); thread.start(); Thread.sleep(5000); t.cancel(context); waiter.await(5000); } @Test void testInvalidSelectWithRefresh() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); context.getLocalProperties().put("refreshInterval", "1000"); String sqlQuery = "select * from invalid_table;"; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code()); assertTrue(interpreterResult.message() .get(0).getData().contains("Table \"INVALID_TABLE\" not found;"), interpreterResult.toString()); } @Test void testColumnAliasQuery() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", 
"org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "select NAME as SOME_OTHER_NAME from test_table limit 1"; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code(), interpreterResult.toString()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("SOME_OTHER_NAME\na_name\n", resultMessages.get(0).getData()); } @Test void testSplitSqlQuery() { String sqlQuery = "insert into test_table(id, name) values ('a', ';\"');" + "select * from test_table;" + "select * from test_table WHERE ID = \";'\";" + "select * from test_table WHERE ID = ';';" + "select '\n', ';';" + "select replace('A\\;B', '\\', 'text');" + "select '\\', ';';" + "select '''', ';';" + "select /*+ scan */ * from test_table;" + "--singleLineComment\nselect * from test_table;"; Properties properties = new Properties(); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); List<String> multipleSqlArray = t.splitSqlQueries(sqlQuery); assertEquals(10, multipleSqlArray.size()); assertEquals("insert into test_table(id, name) values ('a', ';\"')", multipleSqlArray.get(0)); assertEquals("select * from test_table", multipleSqlArray.get(1)); assertEquals("select * from test_table WHERE ID = \";'\"", multipleSqlArray.get(2)); assertEquals("select * from test_table WHERE ID = ';'", multipleSqlArray.get(3)); assertEquals("select '\n', ';'", multipleSqlArray.get(4)); assertEquals("\nselect replace('A\\;B', '\\', 'text')", multipleSqlArray.get(5)); assertEquals("\nselect '\\', ';'", multipleSqlArray.get(6)); assertEquals("\nselect '''', ';'", multipleSqlArray.get(7)); assertEquals("\nselect /*+ scan */ * 
from test_table", multipleSqlArray.get(8)); assertEquals("\n\nselect * from test_table", multipleSqlArray.get(9)); } @Test void testQueryWithEscapedCharacters() throws IOException, InterpreterException { String sqlQuery = "select '\\n', ';';" + "select replace('A\\;B', '\\', 'text');" + "select '\\', ';';" + "select '''', ';'"; Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); properties.setProperty("default.splitQueries", "true"); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); InterpreterResult interpreterResult = t.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(1).getType()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(2).getType()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(3).getType()); assertEquals("'\\n'\t';'\n\\n\t;\n", resultMessages.get(0).getData()); assertEquals("'Atext;B'\nAtext;B\n", resultMessages.get(1).getData()); assertEquals("'\\'\t';'\n\\\t;\n", resultMessages.get(2).getData()); assertEquals("''''\t';'\n'\t;\n", resultMessages.get(3).getData()); } @Test void testSelectMultipleQueries() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); 
properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); properties.setProperty("default.splitQueries", "true"); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "select * from test_table;" + "select * from test_table WHERE ID = ';';"; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(2, resultMessages.size()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("ID\tNAME\na\ta_name\nb\tb_name\nc\tnull\n", resultMessages.get(0).getData()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(1).getType()); assertEquals("ID\tNAME\n", resultMessages.get(1).getData()); } @Test void testDefaultSplitQuries() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "select * from test_table;" + "select * from test_table WHERE ID = ';';"; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(2, resultMessages.size()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("ID\tNAME\na\ta_name\nb\tb_name\nc\tnull\n", resultMessages.get(0).getData()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(1).getType()); 
assertEquals("ID\tNAME\n", resultMessages.get(1).getData()); } @Test void testSelectQueryWithNull() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "select * from test_table WHERE ID = 'c'"; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("ID\tNAME\nc\tnull\n", resultMessages.get(0).getData()); } @Test void testSelectQueryMaxResult() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "select * from test_table"; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("ID\tNAME\na\ta_name\n", resultMessages.get(0).getData()); assertEquals(InterpreterResult.Type.HTML, resultMessages.get(1).getType()); 
assertTrue(resultMessages.get(1).getData().contains("Output is truncated")); } @Test void concurrentSettingTest() { Properties properties = new Properties(); properties.setProperty("zeppelin.jdbc.concurrent.use", "true"); properties.setProperty("zeppelin.jdbc.concurrent.max_connection", "10"); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); assertTrue(jdbcInterpreter.isConcurrentExecution()); assertEquals(10, jdbcInterpreter.getMaxConcurrentConnection()); Scheduler scheduler = jdbcInterpreter.getScheduler(); assertTrue(scheduler instanceof ParallelScheduler); properties.clear(); properties.setProperty("zeppelin.jdbc.concurrent.use", "false"); jdbcInterpreter = new JDBCInterpreter(properties); assertFalse(jdbcInterpreter.isConcurrentExecution()); scheduler = jdbcInterpreter.getScheduler(); assertTrue(scheduler instanceof FIFOScheduler); } @Test void testAutoCompletion() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); jdbcInterpreter.open(); jdbcInterpreter.interpret("", context); List<InterpreterCompletion> completionList = jdbcInterpreter.completion("sel", 3, context); InterpreterCompletion correctCompletionKeyword = new InterpreterCompletion("select", "select", CompletionType.keyword.name()); assertEquals(1, completionList.size()); assertEquals(true, completionList.contains(correctCompletionKeyword)); } private Properties getDBProperty(String dbPrefix, String dbUser, String dbPassowrd) throws IOException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); 
properties.setProperty("common.max_retry", "3"); if (!StringUtils.isBlank(dbPrefix)) { properties.setProperty(dbPrefix + ".driver", "org.h2.Driver"); properties.setProperty(dbPrefix + ".url", getJdbcConnection()); properties.setProperty(dbPrefix + ".user", dbUser); properties.setProperty(dbPrefix + ".password", dbPassowrd); } else { properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", dbUser); properties.setProperty("default.password", dbPassowrd); } return properties; } private AuthenticationInfo getUserAuth(String user, String entityName, String dbUser, String dbPassword) { UserCredentials userCredentials = new UserCredentials(); if (entityName != null && dbUser != null && dbPassword != null) { UsernamePassword up = new UsernamePassword(dbUser, dbPassword); userCredentials.putUsernamePassword(entityName, up); } AuthenticationInfo authInfo = new AuthenticationInfo(); authInfo.setUserCredentials(userCredentials); authInfo.setUser(user); return authInfo; } @Test void testMultiTenant_1() throws IOException, InterpreterException { // user1 %jdbc select from default db // user2 %jdbc select from default db Properties properties = getDBProperty("default", "dbuser", "dbpassword"); properties.putAll(getDBProperty("hive", "", "")); JDBCInterpreter jdbc = new JDBCInterpreter(properties); AuthenticationInfo user1Credential = getUserAuth("user1", null, null, null); AuthenticationInfo user2Credential = getUserAuth("user2", "hive", "user2Id", "user2Pw"); jdbc.open(); // user1 runs default InterpreterContext context = InterpreterContext.builder() .setAuthenticationInfo(user1Credential) .setInterpreterOut(new InterpreterOutput()) .setReplName("jdbc") .build(); jdbc.interpret("", context); JDBCUserConfigurations user1JDBC1Conf = jdbc.getJDBCConfiguration("user1"); assertEquals("dbuser", user1JDBC1Conf.getProperty().get("user")); assertEquals("dbpassword", 
user1JDBC1Conf.getProperty().get("password")); // user2 run default context = InterpreterContext.builder() .setAuthenticationInfo(user2Credential) .setInterpreterOut(new InterpreterOutput()) .setReplName("jdbc") .build(); jdbc.interpret("", context); JDBCUserConfigurations user2JDBC1Conf = jdbc.getJDBCConfiguration("user2"); assertEquals("dbuser", user2JDBC1Conf.getProperty().get("user")); assertEquals("dbpassword", user2JDBC1Conf.getProperty().get("password")); jdbc.close(); } @Test void testMultiTenant_2() throws IOException, InterpreterException { // user1 %hive select from default db // user2 %hive select from default db Properties properties = getDBProperty("default", "", ""); JDBCInterpreter jdbc = new JDBCInterpreter(properties); AuthenticationInfo user1Credential = getUserAuth("user1", "hive", "user1Id", "user1Pw"); AuthenticationInfo user2Credential = getUserAuth("user2", "hive", "user2Id", "user2Pw"); jdbc.open(); // user1 runs default InterpreterContext context = InterpreterContext.builder() .setAuthenticationInfo(user1Credential) .setInterpreterOut(new InterpreterOutput()) .setReplName("hive") .build(); jdbc.interpret("", context); JDBCUserConfigurations user1JDBC1Conf = jdbc.getJDBCConfiguration("user1"); assertEquals("user1Id", user1JDBC1Conf.getProperty().get("user")); assertEquals("user1Pw", user1JDBC1Conf.getProperty().get("password")); // user2 run default context = InterpreterContext.builder() .setAuthenticationInfo(user2Credential) .setInterpreterOut(new InterpreterOutput()) .setReplName("hive") .build(); jdbc.interpret("", context); JDBCUserConfigurations user2JDBC1Conf = jdbc.getJDBCConfiguration("user2"); assertEquals("user2Id", user2JDBC1Conf.getProperty().get("user")); assertEquals("user2Pw", user2JDBC1Conf.getProperty().get("password")); jdbc.close(); } @Test void testPrecode() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("default.driver", "org.h2.Driver"); 
properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); properties.setProperty(DEFAULT_PRECODE, "create table test_precode (id int); insert into test_precode values (1);"); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); jdbcInterpreter.open(); jdbcInterpreter.executePrecode(context); String sqlQuery = "select * from test_precode"; InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(3, resultMessages.size()); assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType()); assertEquals("Query executed successfully. Affected rows : 0\n\n", resultMessages.get(0).getData()); assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(1).getType()); assertEquals("Query executed successfully. 
Affected rows : 1\n", resultMessages.get(1).getData()); assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(2).getType()); assertEquals("ID\n1\n", resultMessages.get(2).getData()); } @Test void testIncorrectPrecode() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); properties.setProperty(DEFAULT_PRECODE, "select 1"); properties.setProperty("incorrect.driver", "org.h2.Driver"); properties.setProperty("incorrect.url", getJdbcConnection()); properties.setProperty("incorrect.user", ""); properties.setProperty("incorrect.password", ""); properties.setProperty(String.format(PRECODE_KEY_TEMPLATE, "incorrect"), "incorrect command"); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); jdbcInterpreter.open(); InterpreterResult interpreterResult = jdbcInterpreter.executePrecode(context); assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code()); assertEquals(InterpreterResult.Type.TEXT, interpreterResult.message().get(0).getType()); } @Test void testStatementPrecode() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); properties.setProperty(DEFAULT_STATEMENT_PRECODE, "set @v='statement'"); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); jdbcInterpreter.open(); String sqlQuery = "select @v"; InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); 
assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType()); assertEquals("@V\nstatement\n", resultMessages.get(0).getData()); } @Test void testIncorrectStatementPrecode() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); properties.setProperty(DEFAULT_STATEMENT_PRECODE, "set incorrect"); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); jdbcInterpreter.open(); String sqlQuery = "select 1"; InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context); assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code()); assertEquals(InterpreterResult.Type.TEXT, interpreterResult.message().get(0).getType()); assertTrue(interpreterResult.message().get(0).getData().contains("Syntax error"), interpreterResult.toString()); } @Test void testSplitSqlQueryWithComments() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("common.max_count", "1000"); properties.setProperty("common.max_retry", "3"); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection()); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); properties.setProperty("default.splitQueries", "true"); JDBCInterpreter t = new JDBCInterpreter(properties); t.open(); String sqlQuery = "/* ; */\n" + "-- /* comment\n" + "--select * from test_table\n" + "select * from test_table; /* some comment ; */\n" + "/*\n" + "select * from test_table;\n" + "*/\n" + "-- a ; b\n" + "select * from test_table WHERE ID = ';--';\n" + "select * from test_table WHERE ID = '/*'; -- test"; InterpreterResult interpreterResult = t.interpret(sqlQuery, context); 
assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage(); assertEquals(3, resultMessages.size()); } @Test void testValidateConnectionUrl() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection() + ";allowLoadLocalInfile=true"); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); jdbcInterpreter.open(); InterpreterResult interpreterResult = jdbcInterpreter.interpret("SELECT 1", context); assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code()); assertEquals("Connection URL contains improper configuration", interpreterResult.message().get(0).getData()); } @Test void testValidateConnectionUrlEncoded() throws IOException, InterpreterException { Properties properties = new Properties(); properties.setProperty("default.driver", "org.h2.Driver"); properties.setProperty("default.url", getJdbcConnection() + ";%61llowLoadLocalInfile=true"); properties.setProperty("default.user", ""); properties.setProperty("default.password", ""); JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties); jdbcInterpreter.open(); InterpreterResult interpreterResult = jdbcInterpreter.interpret("SELECT 1", context); assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code()); assertEquals("Connection URL contains improper configuration", interpreterResult.message().get(0).getData()); } private InterpreterContext getInterpreterContext() { return InterpreterContext.builder() .setAuthenticationInfo(new AuthenticationInfo("testUser")) .setParagraphId("paragraphId") .setInterpreterOut(new InterpreterOutput()) .build(); } }
googleapis/google-cloud-java
35,047
java-dialogflow-cx/google-cloud-dialogflow-cx/src/test/java/com/google/cloud/dialogflow/cx/v3beta1/ExperimentsClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.cx.v3beta1; import static com.google.cloud.dialogflow.cx.v3beta1.ExperimentsClient.ListExperimentsPagedResponse; import static com.google.cloud.dialogflow.cx.v3beta1.ExperimentsClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; import com.google.protobuf.Duration; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import 
org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class ExperimentsClientTest { private static MockExperiments mockExperiments; private static MockLocations mockLocations; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private ExperimentsClient client; @BeforeClass public static void startStaticServer() { mockExperiments = new MockExperiments(); mockLocations = new MockLocations(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockExperiments, mockLocations)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); ExperimentsSettings settings = ExperimentsSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = ExperimentsClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void listExperimentsTest() throws Exception { Experiment responsesElement = Experiment.newBuilder().build(); ListExperimentsResponse expectedResponse = ListExperimentsResponse.newBuilder() .setNextPageToken("") .addAllExperiments(Arrays.asList(responsesElement)) .build(); mockExperiments.addResponse(expectedResponse); EnvironmentName parent = EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]"); ListExperimentsPagedResponse pagedListResponse = client.listExperiments(parent); List<Experiment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getExperimentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, 
actualRequests.size()); ListExperimentsRequest actualRequest = ((ListExperimentsRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listExperimentsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { EnvironmentName parent = EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]"); client.listExperiments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listExperimentsTest2() throws Exception { Experiment responsesElement = Experiment.newBuilder().build(); ListExperimentsResponse expectedResponse = ListExperimentsResponse.newBuilder() .setNextPageToken("") .addAllExperiments(Arrays.asList(responsesElement)) .build(); mockExperiments.addResponse(expectedResponse); String parent = "parent-995424086"; ListExperimentsPagedResponse pagedListResponse = client.listExperiments(parent); List<Experiment> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getExperimentsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListExperimentsRequest actualRequest = ((ListExperimentsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listExperimentsExceptionTest2() throws Exception { StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String parent = "parent-995424086"; client.listExperiments(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getExperimentTest() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); Experiment actualResponse = client.getExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetExperimentRequest actualRequest = ((GetExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getExperimentExceptionTest() throws Exception { StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { ExperimentName name = ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]"); client.getExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getExperimentTest2() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); String name = "name3373707"; Experiment actualResponse = client.getExperiment(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetExperimentRequest actualRequest = ((GetExperimentRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getExperimentExceptionTest2() throws Exception { StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockExperiments.addException(exception); try { String name = "name3373707"; client.getExperiment(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void createExperimentTest() throws Exception { Experiment expectedResponse = Experiment.newBuilder() .setName( ExperimentName.of( "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setDefinition(Experiment.Definition.newBuilder().build()) .setRolloutConfig(RolloutConfig.newBuilder().build()) .setRolloutState(RolloutState.newBuilder().build()) .setRolloutFailureReason("rolloutFailureReason958797373") .setResult(Experiment.Result.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) .setLastUpdateTime(Timestamp.newBuilder().build()) .setExperimentLength(Duration.newBuilder().build()) .addAllVariantsHistory(new ArrayList<VariantsHistory>()) .build(); mockExperiments.addResponse(expectedResponse); EnvironmentName parent = EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]"); Experiment experiment = Experiment.newBuilder().build(); Experiment actualResponse = client.createExperiment(parent, experiment); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockExperiments.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateExperimentRequest actualRequest = ((CreateExperimentRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(experiment, actualRequest.getExperiment()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test 
// Error path for createExperiment(EnvironmentName, Experiment): a server-side
// INVALID_ARGUMENT status must surface to the caller as InvalidArgumentException.
// NOTE(review): generated GAPIC test code — formatting restored, tokens untouched.
public void createExperimentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    EnvironmentName parent =
        EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]");
    Experiment experiment = Experiment.newBuilder().build();
    client.createExperiment(parent, experiment);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for createExperiment(String, Experiment): the canned response is
// returned verbatim, exactly one CreateExperimentRequest is sent carrying the
// given parent and experiment, and the standard API client headers are present.
@Test
public void createExperimentTest2() throws Exception {
  Experiment expectedResponse =
      Experiment.newBuilder()
          .setName(
              ExperimentName.of(
                      "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]")
                  .toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .setDefinition(Experiment.Definition.newBuilder().build())
          .setRolloutConfig(RolloutConfig.newBuilder().build())
          .setRolloutState(RolloutState.newBuilder().build())
          .setRolloutFailureReason("rolloutFailureReason958797373")
          .setResult(Experiment.Result.newBuilder().build())
          .setCreateTime(Timestamp.newBuilder().build())
          .setStartTime(Timestamp.newBuilder().build())
          .setEndTime(Timestamp.newBuilder().build())
          .setLastUpdateTime(Timestamp.newBuilder().build())
          .setExperimentLength(Duration.newBuilder().build())
          .addAllVariantsHistory(new ArrayList<VariantsHistory>())
          .build();
  mockExperiments.addResponse(expectedResponse);

  String parent = "parent-995424086";
  Experiment experiment = Experiment.newBuilder().build();

  Experiment actualResponse = client.createExperiment(parent, experiment);
  Assert.assertEquals(expectedResponse, actualResponse);

  // Exactly one request must have reached the mock, with both fields echoed back.
  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  CreateExperimentRequest actualRequest = ((CreateExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(parent, actualRequest.getParent());
  Assert.assertEquals(experiment, actualRequest.getExperiment());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for createExperiment(String, Experiment).
@Test
public void createExperimentExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    String parent = "parent-995424086";
    Experiment experiment = Experiment.newBuilder().build();
    client.createExperiment(parent, experiment);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for updateExperiment(Experiment, FieldMask): response is returned
// verbatim and the request carries both the experiment and the update mask.
@Test
public void updateExperimentTest() throws Exception {
  Experiment expectedResponse =
      Experiment.newBuilder()
          .setName(
              ExperimentName.of(
                      "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]")
                  .toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .setDefinition(Experiment.Definition.newBuilder().build())
          .setRolloutConfig(RolloutConfig.newBuilder().build())
          .setRolloutState(RolloutState.newBuilder().build())
          .setRolloutFailureReason("rolloutFailureReason958797373")
          .setResult(Experiment.Result.newBuilder().build())
          .setCreateTime(Timestamp.newBuilder().build())
          .setStartTime(Timestamp.newBuilder().build())
          .setEndTime(Timestamp.newBuilder().build())
          .setLastUpdateTime(Timestamp.newBuilder().build())
          .setExperimentLength(Duration.newBuilder().build())
          .addAllVariantsHistory(new ArrayList<VariantsHistory>())
          .build();
  mockExperiments.addResponse(expectedResponse);

  Experiment experiment = Experiment.newBuilder().build();
  FieldMask updateMask = FieldMask.newBuilder().build();

  Experiment actualResponse = client.updateExperiment(experiment, updateMask);
  Assert.assertEquals(expectedResponse, actualResponse);

  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  UpdateExperimentRequest actualRequest = ((UpdateExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(experiment, actualRequest.getExperiment());
  Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for updateExperiment(Experiment, FieldMask).
@Test
public void updateExperimentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    Experiment experiment = Experiment.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    client.updateExperiment(experiment, updateMask);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for deleteExperiment(ExperimentName): the request carries the
// stringified resource name; the RPC returns Empty (nothing to assert on the
// response itself).
@Test
public void deleteExperimentTest() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  mockExperiments.addResponse(expectedResponse);

  ExperimentName name =
      ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]");

  client.deleteExperiment(name);

  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  DeleteExperimentRequest actualRequest = ((DeleteExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for deleteExperiment(ExperimentName).
@Test
public void deleteExperimentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    ExperimentName name =
        ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]");
    client.deleteExperiment(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for deleteExperiment(String): plain-string name overload.
@Test
public void deleteExperimentTest2() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  mockExperiments.addResponse(expectedResponse);

  String name = "name3373707";

  client.deleteExperiment(name);

  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  DeleteExperimentRequest actualRequest = ((DeleteExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for deleteExperiment(String).
@Test
public void deleteExperimentExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    String name = "name3373707";
    client.deleteExperiment(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for startExperiment(ExperimentName).
@Test
public void startExperimentTest() throws Exception {
  Experiment expectedResponse =
      Experiment.newBuilder()
          .setName(
              ExperimentName.of(
                      "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]")
                  .toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .setDefinition(Experiment.Definition.newBuilder().build())
          .setRolloutConfig(RolloutConfig.newBuilder().build())
          .setRolloutState(RolloutState.newBuilder().build())
          .setRolloutFailureReason("rolloutFailureReason958797373")
          .setResult(Experiment.Result.newBuilder().build())
          .setCreateTime(Timestamp.newBuilder().build())
          .setStartTime(Timestamp.newBuilder().build())
          .setEndTime(Timestamp.newBuilder().build())
          .setLastUpdateTime(Timestamp.newBuilder().build())
          .setExperimentLength(Duration.newBuilder().build())
          .addAllVariantsHistory(new ArrayList<VariantsHistory>())
          .build();
  mockExperiments.addResponse(expectedResponse);

  ExperimentName name =
      ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]");

  Experiment actualResponse = client.startExperiment(name);
  Assert.assertEquals(expectedResponse, actualResponse);

  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  StartExperimentRequest actualRequest = ((StartExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for startExperiment(ExperimentName).
@Test
public void startExperimentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    ExperimentName name =
        ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]");
    client.startExperiment(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for startExperiment(String).
@Test
public void startExperimentTest2() throws Exception {
  Experiment expectedResponse =
      Experiment.newBuilder()
          .setName(
              ExperimentName.of(
                      "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]")
                  .toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .setDefinition(Experiment.Definition.newBuilder().build())
          .setRolloutConfig(RolloutConfig.newBuilder().build())
          .setRolloutState(RolloutState.newBuilder().build())
          .setRolloutFailureReason("rolloutFailureReason958797373")
          .setResult(Experiment.Result.newBuilder().build())
          .setCreateTime(Timestamp.newBuilder().build())
          .setStartTime(Timestamp.newBuilder().build())
          .setEndTime(Timestamp.newBuilder().build())
          .setLastUpdateTime(Timestamp.newBuilder().build())
          .setExperimentLength(Duration.newBuilder().build())
          .addAllVariantsHistory(new ArrayList<VariantsHistory>())
          .build();
  mockExperiments.addResponse(expectedResponse);

  String name = "name3373707";

  Experiment actualResponse = client.startExperiment(name);
  Assert.assertEquals(expectedResponse, actualResponse);

  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  StartExperimentRequest actualRequest = ((StartExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for startExperiment(String).
@Test
public void startExperimentExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    String name = "name3373707";
    client.startExperiment(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for stopExperiment(ExperimentName).
@Test
public void stopExperimentTest() throws Exception {
  Experiment expectedResponse =
      Experiment.newBuilder()
          .setName(
              ExperimentName.of(
                      "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]")
                  .toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .setDefinition(Experiment.Definition.newBuilder().build())
          .setRolloutConfig(RolloutConfig.newBuilder().build())
          .setRolloutState(RolloutState.newBuilder().build())
          .setRolloutFailureReason("rolloutFailureReason958797373")
          .setResult(Experiment.Result.newBuilder().build())
          .setCreateTime(Timestamp.newBuilder().build())
          .setStartTime(Timestamp.newBuilder().build())
          .setEndTime(Timestamp.newBuilder().build())
          .setLastUpdateTime(Timestamp.newBuilder().build())
          .setExperimentLength(Duration.newBuilder().build())
          .addAllVariantsHistory(new ArrayList<VariantsHistory>())
          .build();
  mockExperiments.addResponse(expectedResponse);

  ExperimentName name =
      ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]");

  Experiment actualResponse = client.stopExperiment(name);
  Assert.assertEquals(expectedResponse, actualResponse);

  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  StopExperimentRequest actualRequest = ((StopExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for stopExperiment(ExperimentName).
@Test
public void stopExperimentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    ExperimentName name =
        ExperimentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]");
    client.stopExperiment(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for stopExperiment(String).
@Test
public void stopExperimentTest2() throws Exception {
  Experiment expectedResponse =
      Experiment.newBuilder()
          .setName(
              ExperimentName.of(
                      "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[EXPERIMENT]")
                  .toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .setDefinition(Experiment.Definition.newBuilder().build())
          .setRolloutConfig(RolloutConfig.newBuilder().build())
          .setRolloutState(RolloutState.newBuilder().build())
          .setRolloutFailureReason("rolloutFailureReason958797373")
          .setResult(Experiment.Result.newBuilder().build())
          .setCreateTime(Timestamp.newBuilder().build())
          .setStartTime(Timestamp.newBuilder().build())
          .setEndTime(Timestamp.newBuilder().build())
          .setLastUpdateTime(Timestamp.newBuilder().build())
          .setExperimentLength(Duration.newBuilder().build())
          .addAllVariantsHistory(new ArrayList<VariantsHistory>())
          .build();
  mockExperiments.addResponse(expectedResponse);

  String name = "name3373707";

  Experiment actualResponse = client.stopExperiment(name);
  Assert.assertEquals(expectedResponse, actualResponse);

  List<AbstractMessage> actualRequests = mockExperiments.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  StopExperimentRequest actualRequest = ((StopExperimentRequest) actualRequests.get(0));

  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for stopExperiment(String).
@Test
public void stopExperimentExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockExperiments.addException(exception);
  try {
    String name = "name3373707";
    client.stopExperiment(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for the mixed-in Locations API: listLocations pages through the
// mocked single-element response and echoes every request field back.
@Test
public void listLocationsTest() throws Exception {
  Location responsesElement = Location.newBuilder().build();
  // Empty next-page token means this is the one and only page.
  ListLocationsResponse expectedResponse =
      ListLocationsResponse.newBuilder()
          .setNextPageToken("")
          .addAllLocations(Arrays.asList(responsesElement))
          .build();
  mockLocations.addResponse(expectedResponse);

  ListLocationsRequest request =
      ListLocationsRequest.newBuilder()
          .setName("name3373707")
          .setFilter("filter-1274492040")
          .setPageSize(883849137)
          .setPageToken("pageToken873572522")
          .build();

  ListLocationsPagedResponse pagedListResponse = client.listLocations(request);

  List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());

  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));

  List<AbstractMessage> actualRequests = mockLocations.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));

  Assert.assertEquals(request.getName(), actualRequest.getName());
  Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
  Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
  Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for listLocations.
@Test
public void listLocationsExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockLocations.addException(exception);
  try {
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    client.listLocations(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

// Happy path for getLocation: mocked Location is returned verbatim and the
// request's name field is forwarded unchanged.
@Test
public void getLocationTest() throws Exception {
  Location expectedResponse =
      Location.newBuilder()
          .setName("name3373707")
          .setLocationId("locationId1541836720")
          .setDisplayName("displayName1714148973")
          .putAllLabels(new HashMap<String, String>())
          .setMetadata(Any.newBuilder().build())
          .build();
  mockLocations.addResponse(expectedResponse);

  GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();

  Location actualResponse = client.getLocation(request);
  Assert.assertEquals(expectedResponse, actualResponse);

  List<AbstractMessage> actualRequests = mockLocations.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));

  Assert.assertEquals(request.getName(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

// Error path for getLocation.
@Test
public void getLocationExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockLocations.addException(exception);
  try {
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    client.getLocation(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
}
openjdk/jmc
34,004
application/org.openjdk.jmc.flightrecorder.ui/src/main/java/org/openjdk/jmc/flightrecorder/ui/pages/SocketIOPage.java
/* * Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved. * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The contents of this file are subject to the terms of either the Universal Permissive License * v 1.0 as shown at https://oss.oracle.com/licenses/upl * * or the following license: * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.openjdk.jmc.flightrecorder.ui.pages; import static org.openjdk.jmc.flightrecorder.jdk.JdkAttributes.IO_ADDRESS; import static org.openjdk.jmc.flightrecorder.jdk.JdkAttributes.IO_PORT; import java.awt.Color; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Consumer; import java.util.function.Supplier; import org.eclipse.jface.action.IAction; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.osgi.util.NLS; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.ui.forms.widgets.Form; import org.eclipse.ui.forms.widgets.FormToolkit; import org.openjdk.jmc.common.IDisplayable; import org.openjdk.jmc.common.IState; import org.openjdk.jmc.common.IWritableState; import org.openjdk.jmc.common.item.IAccessorFactory; import org.openjdk.jmc.common.item.IAttribute; import org.openjdk.jmc.common.item.IItem; import org.openjdk.jmc.common.item.IItemCollection; import org.openjdk.jmc.common.item.IItemFilter; import org.openjdk.jmc.common.item.ItemCollectionToolkit; import org.openjdk.jmc.common.item.ItemFilters; import org.openjdk.jmc.common.unit.IQuantity; import org.openjdk.jmc.common.unit.IRange; import org.openjdk.jmc.common.unit.QuantitiesToolkit; import org.openjdk.jmc.common.unit.UnitLookup; import org.openjdk.jmc.common.util.ColorToolkit; import org.openjdk.jmc.common.util.StateToolkit; import org.openjdk.jmc.flightrecorder.JfrAttributes; import org.openjdk.jmc.flightrecorder.jdk.JdkAggregators; import org.openjdk.jmc.flightrecorder.jdk.JdkAttributes; import org.openjdk.jmc.flightrecorder.jdk.JdkFilters; import org.openjdk.jmc.flightrecorder.jdk.JdkTypeIDs; import 
org.openjdk.jmc.flightrecorder.rules.util.JfrRuleTopics; import org.openjdk.jmc.flightrecorder.ui.FlightRecorderUI; import org.openjdk.jmc.flightrecorder.ui.IDataPageFactory; import org.openjdk.jmc.flightrecorder.ui.IDisplayablePage; import org.openjdk.jmc.flightrecorder.ui.IPageContainer; import org.openjdk.jmc.flightrecorder.ui.IPageDefinition; import org.openjdk.jmc.flightrecorder.ui.IPageUI; import org.openjdk.jmc.flightrecorder.ui.StreamModel; import org.openjdk.jmc.flightrecorder.ui.common.AbstractDataPage; import org.openjdk.jmc.flightrecorder.ui.common.CompositeKeyAccessorFactory; import org.openjdk.jmc.flightrecorder.ui.common.DataPageToolkit; import org.openjdk.jmc.flightrecorder.ui.common.DurationPercentileTable; import org.openjdk.jmc.flightrecorder.ui.common.DurationPercentileTable.DurationPercentileTableBuilder; import org.openjdk.jmc.flightrecorder.ui.common.FilterComponent; import org.openjdk.jmc.flightrecorder.ui.common.FlavorSelector; import org.openjdk.jmc.flightrecorder.ui.common.FlavorSelector.FlavorSelectorState; import org.openjdk.jmc.flightrecorder.ui.common.ImageConstants; import org.openjdk.jmc.flightrecorder.ui.common.ItemHistogram; import org.openjdk.jmc.flightrecorder.ui.common.ItemHistogram.HistogramSelection; import org.openjdk.jmc.flightrecorder.ui.common.ItemHistogram.ItemHistogramBuilder; import org.openjdk.jmc.flightrecorder.ui.common.ItemHistogramWithInput; import org.openjdk.jmc.flightrecorder.ui.common.ItemList; import org.openjdk.jmc.flightrecorder.ui.common.ItemList.ItemListBuilder; import org.openjdk.jmc.flightrecorder.ui.common.TypeLabelProvider; import org.openjdk.jmc.flightrecorder.ui.messages.internal.Messages; import org.openjdk.jmc.flightrecorder.ui.selection.SelectionStoreActionToolkit; import org.openjdk.jmc.ui.charts.IXDataRenderer; import org.openjdk.jmc.ui.charts.RendererToolkit; import org.openjdk.jmc.ui.charts.XYChart; import org.openjdk.jmc.ui.column.ColumnManager.SelectionState; import 
org.openjdk.jmc.ui.column.ColumnMenusFactory; import org.openjdk.jmc.ui.column.TableSettings; import org.openjdk.jmc.ui.column.TableSettings.ColumnSettings; import org.openjdk.jmc.ui.handlers.ActionToolkit; import org.openjdk.jmc.ui.handlers.MCContextMenuManager; import org.openjdk.jmc.ui.layout.SimpleLayout; import org.openjdk.jmc.ui.layout.SimpleLayoutData; import org.openjdk.jmc.ui.misc.ChartCanvas; import org.openjdk.jmc.ui.misc.PersistableSashForm; public class SocketIOPage extends AbstractDataPage { public static class SocketIOPageFactory implements IDataPageFactory { @Override public String getName(IState state) { return Messages.SocketIOPage_PAGE_NAME; } @Override public ImageDescriptor getImageDescriptor(IState state) { return FlightRecorderUI.getDefault().getMCImageDescriptor(ImageConstants.PAGE_IO); } @Override public String[] getTopics(IState state) { return new String[] {JfrRuleTopics.SOCKET_IO}; } @Override public IDisplayablePage createPage(IPageDefinition dpd, StreamModel items, IPageContainer editor) { return new SocketIOPage(dpd, items, editor); } } private static final Color WRITE_COLOR = TypeLabelProvider.getColor(JdkTypeIDs.SOCKET_WRITE); private static final Color READ_COLOR = TypeLabelProvider.getColor(JdkTypeIDs.SOCKET_READ); private static final Color WRITE_ALPHA_COLOR = ColorToolkit.withAlpha(WRITE_COLOR, 80); private static final Color READ_ALPHA_COLOR = ColorToolkit.withAlpha(READ_COLOR, 80); private static final IItemFilter TABLE_ITEMS = ItemFilters.type(JdkTypeIDs.SOCKET_READ, JdkTypeIDs.SOCKET_WRITE); private static final String TOTAL_TIME = "totalTime"; //$NON-NLS-1$ private static final String MAX_TIME = "maxTime"; //$NON-NLS-1$ private static final String AVG_TIME = "avgTime"; //$NON-NLS-1$ private static final String STDDEV_TIME = "stddevTime"; //$NON-NLS-1$ private static final String READ_COUNT = "readCount"; //$NON-NLS-1$ private static final String WRITE_COUNT = "writeCount"; //$NON-NLS-1$ private static final String READ_SIZE 
= "readSize"; //$NON-NLS-1$ private static final String WRITE_SIZE = "writeSize"; //$NON-NLS-1$ private static final String READ_EOS = "endOfStream"; //$NON-NLS-1$ private static final String IO_TIMEOUT = "timeout"; //$NON-NLS-1$ private static final String PERCENTILE_READ_TIME = "percentileReadTime"; //$NON-NLS-1$ private static final String PERCENTILE_READ_COUNT = "percentileReadCount"; //$NON-NLS-1$ private static final String PERCENTILE_WRITE_TIME = "percentileWriteTime"; //$NON-NLS-1$ private static final String PERCENTILE_WRITE_COUNT = "percentileWriteCount"; //$NON-NLS-1$ private static final IAccessorFactory<IDisplayable> HOST_AND_PORT_AF = CompositeKeyAccessorFactory.displayable( " : ", JdkAttributes.IO_ADDRESS, //$NON-NLS-1$ JdkAttributes.IO_PORT); private static final ItemHistogramBuilder HISTOGRAM = new ItemHistogramBuilder(); private static final ItemListBuilder LIST = new ItemListBuilder(); private static final DurationPercentileTableBuilder PERCENTILES = new DurationPercentileTableBuilder(); static { HISTOGRAM.addCountColumn(); HISTOGRAM.addColumn(TOTAL_TIME, JdkAggregators.TOTAL_IO_TIME); HISTOGRAM.addColumn(MAX_TIME, JdkAggregators.MAX_IO_TIME); HISTOGRAM.addColumn(AVG_TIME, JdkAggregators.AVG_IO_TIME); HISTOGRAM.addColumn(STDDEV_TIME, JdkAggregators.STDDEV_IO_TIME); HISTOGRAM.addColumn(READ_COUNT, JdkAggregators.SOCKET_READ_COUNT); HISTOGRAM.addColumn(WRITE_COUNT, JdkAggregators.SOCKET_WRITE_COUNT); HISTOGRAM.addColumn(READ_SIZE, JdkAggregators.SOCKET_READ_SIZE); HISTOGRAM.addColumn(WRITE_SIZE, JdkAggregators.SOCKET_WRITE_SIZE); // FIXME: Would we like to include # of hosts, # of ports and host name in the new histograms? 
LIST.addColumn(JdkAttributes.IO_ADDRESS); LIST.addColumn(JdkAttributes.IO_HOST); LIST.addColumn(JdkAttributes.IO_PORT); LIST.addColumn(JfrAttributes.START_TIME); LIST.addColumn(JfrAttributes.END_TIME); LIST.addColumn(JfrAttributes.DURATION); LIST.addColumn(JdkAttributes.IO_SOCKET_BYTES_READ); LIST.addColumn(JdkAttributes.IO_SOCKET_BYTES_WRITTEN); LIST.addColumn(JfrAttributes.EVENT_THREAD); LIST.addColumn(JdkAttributes.IO_SOCKET_READ_EOS); LIST.addColumn(JdkAttributes.IO_TIMEOUT); PERCENTILES.addSeries(PERCENTILE_READ_TIME, Messages.SocketIOPage_ROW_SOCKET_READ, PERCENTILE_READ_COUNT, JdkAggregators.SOCKET_READ_COUNT.getName(), JdkTypeIDs.SOCKET_READ); PERCENTILES.addSeries(PERCENTILE_WRITE_TIME, Messages.SocketIOPage_ROW_SOCKET_WRITE, PERCENTILE_WRITE_COUNT, JdkAggregators.SOCKET_WRITE_COUNT.getName(), JdkTypeIDs.SOCKET_WRITE); } private enum HistogramType { HOST, PORT, HOST_AND_PORT } private class IOPageUi implements IPageUI { private static final String PRIMARY_FILTER = "primaryFilter"; //$NON-NLS-1$ private static final String SECONDARY_FILTER = "secondaryFilter"; //$NON-NLS-1$ private static final String EVENT_FILTER = "eventFilter"; //$NON-NLS-1$ private static final String SASH_ELEMENT = "sash"; //$NON-NLS-1$ private static final String LIST_ELEMENT = "eventList"; //$NON-NLS-1$ private static final String SOCKETIO_TABLE_ELEMENT = "socketTable"; //$NON-NLS-1$ private static final String SECONDARY_SOCKETIO_TABLE_ELEMENT = "secondarySocketTable"; //$NON-NLS-1$ private static final String PERCENTILE_TABLE_ELEMENT = "percentileTable"; //$NON-NLS-1$ private static final String HISTGRAM_TYPE = "histogramType"; //$NON-NLS-1$ private final ChartCanvas timelineCanvas; private final ChartCanvas durationCanvas; private final ChartCanvas sizeCanvas; private final ItemList itemList; private final SashForm sash; private final IPageContainer pageContainer; private final Composite histogramParent; private ItemHistogram primaryHistogram; private Supplier<TableSettings> 
secondaryHistogramSettings; private Consumer<IItemCollection> itemConsumerRoot; private HistogramType histogramType; private ItemHistogram secondaryHistogram; private FilterComponent primaryFilter; private FilterComponent secondaryFilter; private FilterComponent eventFilter; private IRange<IQuantity> timeRange; private IItemCollection selectionItems; private XYChart timelineChart; private XYChart durationChart; private XYChart sizeChart; private CTabFolder tabFolder; private FlavorSelector flavorSelector; private DurationPercentileTable percentileTable; private Composite durationParent; IOPageUi(Composite parent, FormToolkit toolkit, IPageContainer pageContainer, IState state) { this.pageContainer = pageContainer; Form form = DataPageToolkit.createForm(parent, toolkit, getName(), getIcon()); sash = new SashForm(form.getBody(), SWT.VERTICAL); toolkit.adapt(sash); histogramParent = toolkit.createComposite(sash); histogramParent.setLayout(new FillLayout(SWT.VERTICAL)); histogramType = StateToolkit.readEnum(state, HISTGRAM_TYPE, HistogramType.HOST, HistogramType.class); buildHistograms(TableSettings.forState(state.getChild(SOCKETIO_TABLE_ELEMENT)), TableSettings.forState(state.getChild(SECONDARY_SOCKETIO_TABLE_ELEMENT))); tabFolder = new CTabFolder(sash, SWT.NONE); toolkit.adapt(tabFolder); CTabItem t1 = new CTabItem(tabFolder, SWT.NONE); t1.setToolTipText(Messages.IO_PAGE_TIMELINE_DESCRIPTION); timelineCanvas = new ChartCanvas(tabFolder); t1.setText(Messages.PAGES_TIMELINE); t1.setControl(timelineCanvas); DataPageToolkit.createChartTimestampTooltip(timelineCanvas); timelineChart = new XYChart(pageContainer.getRecordingRange(), RendererToolkit.empty(), 180); timelineChart.setVisibleRange(timelineRange.getStart(), timelineRange.getEnd()); timelineChart.addVisibleRangeListener(r -> timelineRange = r); IItemCollection socketItems = getDataSource().getItems().apply(JdkFilters.SOCKET_READ_OR_WRITE); // FIXME: X-auto-range should be done properly IQuantity max = 
socketItems.getAggregate(JdkAggregators.LONGEST_EVENT); // FIXME: Workaround to make max value included max = max == null ? UnitLookup.MILLISECOND.quantity(20) : max.add(UnitLookup.MILLISECOND.quantity(20)); durationChart = new XYChart(UnitLookup.MILLISECOND.quantity(0), max, RendererToolkit.empty(), 180); durationChart.setVisibleRange(durationRange.getStart(), durationRange.getEnd()); durationChart.addVisibleRangeListener(r -> durationRange = r); buildChart(); CTabItem t2 = new CTabItem(tabFolder, SWT.NONE); durationParent = toolkit.createComposite(tabFolder); durationParent.setLayout(new SimpleLayout()); t2.setToolTipText(Messages.IO_PAGE_DURATIONS_DESCRIPTION); durationCanvas = new ChartCanvas(durationParent); durationCanvas.setLayoutData(new SimpleLayoutData(3.5f)); DataPageToolkit.createChartTooltip(durationCanvas); DataPageToolkit.setChart(durationCanvas, durationChart, JfrAttributes.DURATION, pageContainer::showSelection); SelectionStoreActionToolkit.addSelectionStoreActions(pageContainer.getSelectionStore(), durationChart, JfrAttributes.DURATION, Messages.SocketIOPage_DURATION_SELECTION, durationCanvas.getContextMenu()); percentileTable = PERCENTILES.build(durationParent, TableSettings.forState(state.getChild(PERCENTILE_TABLE_ELEMENT))); percentileTable.getManager().getViewer().getControl().setLayoutData(new SimpleLayoutData(6.5f)); MCContextMenuManager percentileTableMm = MCContextMenuManager .create(percentileTable.getManager().getViewer().getControl()); ColumnMenusFactory.addDefaultMenus(percentileTable.getManager(), percentileTableMm); SelectionStoreActionToolkit.addSelectionStoreActions(percentileTable.getManager().getViewer(), pageContainer.getSelectionStore(), percentileTable::getSelectedItems, Messages.SocketIOPage_PERCENTILE_SELECTION, percentileTableMm); percentileTable.getManager().setSelectionState(percentileSelection); t2.setText(Messages.PAGES_DURATIONS); t2.setControl(durationParent); IQuantity sizeMax = QuantitiesToolkit.maxPresent( 
socketItems.getAggregate(JdkAggregators.SOCKET_READ_LARGEST), socketItems.getAggregate(JdkAggregators.SOCKET_WRITE_LARGEST)); // FIXME: Workaround to make max value included sizeMax = sizeMax == null ? UnitLookup.BYTE.quantity(64) : sizeMax.add(UnitLookup.BYTE.quantity(64)); sizeChart = new XYChart(UnitLookup.BYTE.quantity(0), sizeMax, RendererToolkit.empty(), 180); sizeChart.setVisibleRange(sizeRange.getStart(), sizeMax); sizeChart.addVisibleRangeListener(range -> sizeRange = range); CTabItem t3 = new CTabItem(tabFolder, SWT.NONE); t3.setToolTipText(Messages.IO_PAGE_SIZE_DESCRIPTION); sizeCanvas = new ChartCanvas(tabFolder); t3.setText(Messages.PAGES_SIZE); t3.setControl(sizeCanvas); DataPageToolkit.createChartTooltip(sizeCanvas); DataPageToolkit.setChart(sizeCanvas, sizeChart, JdkAttributes.IO_SIZE, pageContainer::showSelection); SelectionStoreActionToolkit.addSelectionStoreActions(pageContainer.getSelectionStore(), sizeChart, JdkAttributes.IO_SIZE, Messages.SocketIOPage_SIZE_SELECTION, sizeCanvas.getContextMenu()); CTabItem t4 = new CTabItem(tabFolder, SWT.NONE); t4.setToolTipText(Messages.IO_PAGE_EVENT_LOG_DESCRIPTION); itemList = LIST.buildWithoutBorder(tabFolder, getTableSettings(state.getChild(LIST_ELEMENT))); MCContextMenuManager itemListMm = MCContextMenuManager .create(itemList.getManager().getViewer().getControl()); ColumnMenusFactory.addDefaultMenus(itemList.getManager(), itemListMm); SelectionStoreActionToolkit.addSelectionStoreActions(pageContainer.getSelectionStore(), itemList, Messages.SocketIOPage_LOG_SELECTION, itemListMm); itemList.getManager().getViewer().addSelectionChangedListener( e -> pageContainer.showSelection(ItemCollectionToolkit.build(itemList.getSelection().get()))); t4.setText(Messages.PAGES_EVENT_LOG); eventFilter = FilterComponent.createFilterComponent(itemList, itemListFilter, getDataSource().getItems().apply(TABLE_ITEMS), pageContainer.getSelectionStore()::getSelections, this::onEventFilterChange); 
itemListMm.add(eventFilter.getShowFilterAction()); itemListMm.add(eventFilter.getShowSearchAction()); t4.setControl(eventFilter.getComponent()); eventFilter.loadState(state.getChild(EVENT_FILTER)); onEventFilterChange(itemListFilter); itemList.getManager().setSelectionState(itemListSelection); tabFolder.setSelection(tabFolderIndex); PersistableSashForm.loadState(sash, state.getChild(SASH_ELEMENT)); flavorSelector = FlavorSelector.itemsWithTimerange(form, TABLE_ITEMS, getDataSource().getItems(), pageContainer, this::onInputSelected, this::onUseRange, flavorSelectorState); form.getToolBarManager() .appendToGroup(DataPageToolkit.FORM_TOOLBAR_PAGE_SETUP, buildHistogramTypeAction(HistogramType.HOST, Messages.SocketIOPage_BY_HOST_ACTION, FlightRecorderUI.getDefault().getMCImageDescriptor(ImageConstants.ICON_IO_BY_HOST))); form.getToolBarManager() .appendToGroup(DataPageToolkit.FORM_TOOLBAR_PAGE_SETUP, buildHistogramTypeAction(HistogramType.PORT, Messages.SocketIOPage_BY_PORT_ACTION, FlightRecorderUI.getDefault().getMCImageDescriptor(ImageConstants.ICON_IO_BY_PORT))); form.getToolBarManager().appendToGroup(DataPageToolkit.FORM_TOOLBAR_PAGE_SETUP, buildHistogramTypeAction( HistogramType.HOST_AND_PORT, Messages.SocketIOPage_BY_HOST_AND_PORT_ACTION, FlightRecorderUI.getDefault().getMCImageDescriptor(ImageConstants.ICON_IO_BY_HOST_AND_PORT))); addResultActions(form); } // FIXME: Break out this to a "ConfigurableHistogramUi or something? 
This is copy-pasted from ExceptionsPage private IAction buildHistogramTypeAction(HistogramType histogramType, String text, ImageDescriptor icon) { IAction a = ActionToolkit.radioAction(() -> setHistogramType(histogramType), text, icon); a.setChecked(histogramType == this.histogramType); return a; } private void setHistogramType(HistogramType histogramType) { if (histogramType != this.histogramType) { primaryTableSelection.put(this.histogramType, primaryHistogram.getManager().getSelectionState()); if (secondaryHistogram != null) { secondaryTableSelection.put(this.histogramType, secondaryHistogram.getManager().getSelectionState()); } this.histogramType = histogramType; TableSettings primarySettings = primaryHistogram.getManager().getSettings(); TableSettings secondarySettings = secondaryHistogramSettings.get(); for (Control c : histogramParent.getChildren()) { c.dispose(); } buildHistograms(primarySettings, secondarySettings); refreshPageItems(); } } private void buildHistograms(TableSettings primarySettings, TableSettings secondarySettings) { if (histogramType == HistogramType.HOST_AND_PORT) { primaryHistogram = HISTOGRAM.buildWithoutBorder(histogramParent, Messages.SocketIOPage_HOST_AND_PORT, UnitLookup.UNKNOWN, HOST_AND_PORT_AF, primarySettings); primaryFilter = FilterComponent.createFilterComponent(primaryHistogram, primaryTableFilter.get(histogramType), getDataSource().getItems().apply(TABLE_ITEMS), pageContainer.getSelectionStore()::getSelections, this::onPrimaryFilterChange); secondaryHistogram = null; secondaryHistogramSettings = () -> secondarySettings; secondaryFilter = null; onPrimaryFilterChange(primaryTableFilter.get(histogramType)); primaryHistogram.getManager().setSelectionState(primaryTableSelection.get(histogramType)); itemConsumerRoot = ItemHistogramWithInput.chain(primaryHistogram, this::updateChartAndListDetails); } else { SashForm s2 = new SashForm(histogramParent, SWT.VERTICAL); IAttribute<?> masterAttr = histogramType == HistogramType.HOST ? 
IO_ADDRESS : IO_PORT; IAttribute<?> slaveAttr = histogramType == HistogramType.PORT ? IO_ADDRESS : IO_PORT; primaryHistogram = HISTOGRAM.buildWithoutBorder(s2, masterAttr, primarySettings); primaryFilter = FilterComponent.createFilterComponent(primaryHistogram, primaryTableFilter.get(histogramType), getDataSource().getItems().apply(TABLE_ITEMS), pageContainer.getSelectionStore()::getSelections, this::onPrimaryFilterChange); secondaryHistogram = HISTOGRAM.buildWithoutBorder(s2, slaveAttr, secondarySettings); secondaryFilter = FilterComponent.createFilterComponent(secondaryHistogram, secondaryTableFilter.get(histogramType), getDataSource().getItems().apply(TABLE_ITEMS), pageContainer.getSelectionStore()::getSelections, this::onSecondaryFilterChange); secondaryHistogramSettings = secondaryHistogram.getManager()::getSettings; onPrimaryFilterChange(primaryTableFilter.get(histogramType)); onSecondaryFilterChange(secondaryTableFilter.get(histogramType)); primaryHistogram.getManager().setSelectionState(primaryTableSelection.get(histogramType)); secondaryHistogram.getManager().setSelectionState(secondaryTableSelection.get(histogramType)); itemConsumerRoot = ItemHistogramWithInput.chain(primaryHistogram, this::updateChartAndListDetails, secondaryHistogram); addContextMenu(secondaryHistogram, secondaryFilter.getShowFilterAction(), secondaryFilter.getShowSearchAction()); secondaryFilter.loadState(getState().getChild(SECONDARY_FILTER)); } addContextMenu(primaryHistogram, primaryFilter.getShowFilterAction(), primaryFilter.getShowSearchAction()); primaryFilter.loadState(getState().getChild(PRIMARY_FILTER)); histogramParent.layout(); } private void addContextMenu(ItemHistogram h, IAction ... 
actions) { MCContextMenuManager mm = MCContextMenuManager.create(h.getManager().getViewer().getControl()); ColumnMenusFactory.addDefaultMenus(h.getManager(), mm); SelectionStoreActionToolkit.addSelectionStoreActions(pageContainer.getSelectionStore(), h, Messages.SocketIOPage_HISTOGRAM_SELECTION, mm); for (IAction action : actions) { mm.add(action); } } private void onPrimaryFilterChange(IItemFilter filter) { primaryFilter.filterChangeHelper(filter, primaryHistogram, getDataSource().getItems().apply(TABLE_ITEMS)); if (secondaryFilter != null) { secondaryFilter.notifyListener(); } primaryTableFilter.put(histogramType, filter); } private void onSecondaryFilterChange(IItemFilter filter) { secondaryFilter.filterChangeHelper(filter, secondaryHistogram, getDataSource().getItems().apply(TABLE_ITEMS)); secondaryTableFilter.put(histogramType, filter); } private void onEventFilterChange(IItemFilter filter) { eventFilter.filterChangeHelper(filter, itemList, getDataSource().getItems().apply(TABLE_ITEMS)); itemListFilter = filter; } @Override public void saveTo(IWritableState writableState) { StateToolkit.writeEnum(writableState, HISTGRAM_TYPE, histogramType); PersistableSashForm.saveState(sash, writableState.createChild(SASH_ELEMENT)); primaryHistogram.getManager().getSettings().saveState(writableState.createChild(SOCKETIO_TABLE_ELEMENT)); primaryFilter.saveState(writableState.createChild(PRIMARY_FILTER)); Optional.ofNullable(secondaryHistogramSettings.get()).ifPresent( settings -> settings.saveState(writableState.createChild(SECONDARY_SOCKETIO_TABLE_ELEMENT))); if (secondaryFilter != null) { secondaryFilter.saveState(writableState.createChild(SECONDARY_FILTER)); } itemList.getManager().getSettings().saveState(writableState.createChild(LIST_ELEMENT)); percentileTable.getManager().getSettings().saveState(writableState.createChild(PERCENTILE_TABLE_ELEMENT)); eventFilter.saveState(writableState.createChild(EVENT_FILTER)); saveToLocal(); } private void saveToLocal() { 
primaryTableSelection.put(histogramType, primaryHistogram.getManager().getSelectionState()); if (secondaryHistogram != null) { secondaryTableSelection.put(histogramType, secondaryHistogram.getManager().getSelectionState()); } itemListSelection = itemList.getManager().getSelectionState(); tabFolderIndex = tabFolder.getSelectionIndex(); flavorSelectorState = flavorSelector.getFlavorSelectorState(); percentileSelection = percentileTable.getManager().getSelectionState(); } private void onUseRange(Boolean show) { IRange<IQuantity> range = show ? timeRange : pageContainer.getRecordingRange(); timelineChart.setVisibleRange(range.getStart(), range.getEnd()); buildChart(); } private void buildChart() { DataPageToolkit.setChart(timelineCanvas, timelineChart, selection -> pageContainer.showSelection(selection)); SelectionStoreActionToolkit.addSelectionStoreRangeActions(pageContainer.getSelectionStore(), timelineChart, JfrAttributes.LIFETIME, Messages.SocketIOPage_TIMELINE_SELECTION, timelineCanvas.getContextMenu()); } private void onInputSelected(IItemCollection items, IRange<IQuantity> timeRange) { this.selectionItems = items; this.timeRange = timeRange; refreshPageItems(); } private void refreshPageItems() { IItemCollection items = selectionItems != null ? 
selectionItems : getDataSource().getItems(); IItemCollection filteredItems = items.apply(JdkFilters.SOCKET_READ_OR_WRITE); itemConsumerRoot.accept(filteredItems); } private void updateChartAndListDetails(IItemCollection selectedItems) { String hostCount = hostPortCount(); List<IXDataRenderer> timelineRows = new ArrayList<>(); List<IXDataRenderer> durationRows = new ArrayList<>(); List<IXDataRenderer> sizeRows = new ArrayList<>(); IItemCollection readItems = selectedItems.apply(JdkFilters.SOCKET_READ); if (readItems.hasItems()) { timelineRows.add(DataPageToolkit.buildSizeRow(Messages.SocketIOPage_ROW_SOCKET_READ + hostCount, JdkAggregators.SOCKET_READ_SIZE.getDescription(), readItems, JdkAggregators.SOCKET_READ_SIZE, READ_COLOR, SocketIOPage::getColor)); durationRows .add(DataPageToolkit.buildDurationHistogram(Messages.SocketIOPage_ROW_SOCKET_READ + hostCount, JdkAggregators.SOCKET_READ_COUNT.getDescription(), readItems, JdkAggregators.SOCKET_READ_COUNT, READ_COLOR)); sizeRows.add(DataPageToolkit.buildSizeHistogram(Messages.SocketIOPage_ROW_SOCKET_READ + hostCount, JdkAggregators.SOCKET_READ_COUNT.getDescription(), readItems, JdkAggregators.SOCKET_READ_COUNT, READ_COLOR, JdkAttributes.IO_SOCKET_BYTES_READ)); } IItemCollection writeItems = selectedItems.apply(JdkFilters.SOCKET_WRITE); if (writeItems.hasItems()) { timelineRows.add(DataPageToolkit.buildSizeRow(Messages.SocketIOPage_ROW_SOCKET_WRITE + hostCount, JdkAggregators.SOCKET_WRITE_SIZE.getDescription(), writeItems, JdkAggregators.SOCKET_WRITE_SIZE, WRITE_COLOR, SocketIOPage::getColor)); durationRows .add(DataPageToolkit.buildDurationHistogram(Messages.SocketIOPage_ROW_SOCKET_WRITE + hostCount, JdkAggregators.SOCKET_WRITE_COUNT.getDescription(), writeItems, JdkAggregators.SOCKET_WRITE_COUNT, WRITE_COLOR)); sizeRows.add(DataPageToolkit.buildSizeHistogram(Messages.SocketIOPage_ROW_SOCKET_WRITE + hostCount, JdkAggregators.SOCKET_WRITE_COUNT.getDescription(), writeItems, JdkAggregators.SOCKET_WRITE_COUNT, 
WRITE_COLOR, JdkAttributes.IO_SOCKET_BYTES_WRITTEN)); } if (timelineCanvas != null) { timelineCanvas.replaceRenderer(RendererToolkit.uniformRows(timelineRows)); durationCanvas.replaceRenderer(RendererToolkit.uniformRows(durationRows)); sizeCanvas.replaceRenderer(RendererToolkit.uniformRows(sizeRows)); itemList.show(selectedItems); pageContainer.showSelection(selectedItems); percentileTable.update(selectedItems); } } public String hostPortCount() { HistogramSelection hostSelection = histogramType == HistogramType.HOST ? primaryHistogram.getSelection() : histogramType == HistogramType.PORT ? secondaryHistogram.getSelection() : null; HistogramSelection portSelection = histogramType == HistogramType.PORT ? primaryHistogram.getSelection() : histogramType == HistogramType.HOST ? secondaryHistogram.getSelection() : null; HistogramSelection hostPortSelection = histogramType == HistogramType.HOST_AND_PORT ? primaryHistogram.getSelection() : null; return hostPortCount(hostSelection != null ? hostSelection.getRowCount() : 0, portSelection != null ? portSelection.getRowCount() : 0, hostPortSelection != null ? 
hostPortSelection.getRowCount() : 0); } public String hostPortCount(int hostCount, int portCount, int hostPortCount) { switch (hostPortCount) { case 0: switch (hostCount) { case 0: switch (portCount) { case 0: return ""; //$NON-NLS-1$ case 1: return " (" + Messages.SocketIOPage_SELECTED_PORT + ")"; //$NON-NLS-1$ //$NON-NLS-2$ default: return " (" + NLS.bind(Messages.SocketIOPage_SELECTED_PORTS, portCount) + ")"; //$NON-NLS-1$ //$NON-NLS-2$ } case 1: switch (portCount) { case 0: return " (" + Messages.SocketIOPage_SELECTED_HOST + ")"; //$NON-NLS-1$ //$NON-NLS-2$ case 1: return " (" + Messages.SocketIOPage_SELECTED_HOST_AND_PORT + ")"; //$NON-NLS-1$ //$NON-NLS-2$ default: return " (" + NLS.bind(Messages.SocketIOPage_SELECTED_HOST_AND_PORTS, portCount) + ")"; //$NON-NLS-1$ //$NON-NLS-2$ } default: switch (portCount) { case 0: return " (" + NLS.bind(Messages.SocketIOPage_SELECTED_HOSTS, hostCount) + ")"; //$NON-NLS-1$ //$NON-NLS-2$ case 1: return " (" + NLS.bind(Messages.SocketIOPage_SELECTED_HOSTS_AND_PORT, hostCount) + ")"; //$NON-NLS-1$ //$NON-NLS-2$ default: return " (" + NLS.bind(Messages.SocketIOPage_SELECTED_HOSTS_AND_PORTS, hostCount, portCount) //$NON-NLS-1$ + ")"; //$NON-NLS-1$ } } default: return " (" + NLS.bind(Messages.SocketIOPage_SELECTED_HOSTS_PORTS, hostPortCount) + ")"; //$NON-NLS-1$ //$NON-NLS-2$ } } } private static TableSettings getTableSettings(IState state) { if (state == null) { return new TableSettings(TOTAL_TIME, Arrays.asList( new ColumnSettings(ItemHistogram.KEY_COL_ID, false, 500, null), new ColumnSettings(TOTAL_TIME, true, 120, false), new ColumnSettings(MAX_TIME, false, 120, false), new ColumnSettings(AVG_TIME, false, 120, false), new ColumnSettings(STDDEV_TIME, false, 120, false), new ColumnSettings(READ_COUNT, false, 120, false), new ColumnSettings(WRITE_COUNT, false, 120, false), new ColumnSettings(READ_SIZE, false, 120, false), new ColumnSettings(WRITE_SIZE, false, 120, false), new ColumnSettings(READ_EOS, false, 80, false), new 
ColumnSettings(IO_TIMEOUT, false, 50, false))); } else { return new TableSettings(state); } } @Override public IPageUI display(Composite parent, FormToolkit toolkit, IPageContainer pageContainer, IState state) { return new IOPageUi(parent, toolkit, pageContainer, state); } private Map<HistogramType, SelectionState> primaryTableSelection; private Map<HistogramType, SelectionState> secondaryTableSelection; private SelectionState itemListSelection; private SelectionState percentileSelection; private Map<HistogramType, IItemFilter> primaryTableFilter; private Map<HistogramType, IItemFilter> secondaryTableFilter; private IItemFilter itemListFilter; private IRange<IQuantity> timelineRange; private IRange<IQuantity> durationRange; private IRange<IQuantity> sizeRange; private int tabFolderIndex = 0; public FlavorSelectorState flavorSelectorState; public SocketIOPage(IPageDefinition dpd, StreamModel items, IPageContainer editor) { super(dpd, items, editor); primaryTableSelection = new HashMap<>(); secondaryTableSelection = new HashMap<>(); primaryTableFilter = new HashMap<>(); secondaryTableFilter = new HashMap<>(); timelineRange = editor.getRecordingRange(); durationRange = editor.getRecordingRange(); sizeRange = DataPageToolkit.buildSizeRange(items.getItems(), true); } @Override public IItemFilter getDefaultSelectionFilter() { return TABLE_ITEMS; } private static Color getColor(IItem item) { return JdkTypeIDs.SOCKET_READ.equals(item.getType().getIdentifier()) ? READ_ALPHA_COLOR : WRITE_ALPHA_COLOR; } }
apache/xmlgraphics-batik
35,373
batik-bridge/src/main/java/org/apache/batik/bridge/CSSUtilities.java
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.bridge; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Composite; import java.awt.Cursor; import java.awt.RenderingHints; import java.awt.geom.GeneralPath; import java.awt.geom.Rectangle2D; import org.apache.batik.anim.dom.SVGOMDocument; import org.apache.batik.css.engine.CSSEngine; import org.apache.batik.css.engine.CSSStylableElement; import org.apache.batik.css.engine.SVGCSSEngine; import org.apache.batik.css.engine.value.ListValue; import org.apache.batik.css.engine.value.Value; import org.apache.batik.ext.awt.MultipleGradientPaint; import org.apache.batik.ext.awt.image.renderable.ClipRable; import org.apache.batik.ext.awt.image.renderable.Filter; import org.apache.batik.gvt.CompositeGraphicsNode; import org.apache.batik.gvt.GraphicsNode; import org.apache.batik.gvt.filter.Mask; import org.apache.batik.util.CSSConstants; import org.apache.batik.constants.XMLConstants; import org.w3c.dom.Element; import org.w3c.dom.css.CSSPrimitiveValue; import org.w3c.dom.css.CSSValue; /** * A collection of utility method involving CSS property. The listed * methods bellow could be used as convenient methods to create * concrete objects regarding to CSS properties. 
* * @author <a href="mailto:tkormann@apache.org">Thierry Kormann</a> * @version $Id$ */ public abstract class CSSUtilities implements CSSConstants, ErrorConstants, XMLConstants { /** * No instance of this class is required. */ protected CSSUtilities() {} ///////////////////////////////////////////////////////////////////////// // Global methods ///////////////////////////////////////////////////////////////////////// /** * Returns CSSEngine associated to the specified element. * @param e the element */ public static CSSEngine getCSSEngine(Element e) { return ((SVGOMDocument)e.getOwnerDocument()).getCSSEngine(); } /** * Returns the computed style of the given property. */ public static Value getComputedStyle(Element e, int property) { CSSEngine engine = getCSSEngine(e); if (engine == null) return null; return engine.getComputedStyle((CSSStylableElement)e, null, property); } ///////////////////////////////////////////////////////////////////////// // 'pointer-events' ///////////////////////////////////////////////////////////////////////// /** * Returns the type that describes how this graphics node reacts to events. 
* * @return GraphicsNode.VISIBLE_PAINTED | * GraphicsNode.VISIBLE_FILL | * GraphicsNode.VISIBLE_STROKE | * GraphicsNode.VISIBLE | * GraphicsNode.PAINTED | * GraphicsNode.FILL | * GraphicsNode.STROKE | * GraphicsNode.ALL | * GraphicsNode.NONE */ public static int convertPointerEvents(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.POINTER_EVENTS_INDEX); String s = v.getStringValue(); switch(s.charAt(0)) { case 'v': if (s.length() == 7) { return GraphicsNode.VISIBLE; } else { switch(s.charAt(7)) { case 'p': return GraphicsNode.VISIBLE_PAINTED; case 'f': return GraphicsNode.VISIBLE_FILL; case 's': return GraphicsNode.VISIBLE_STROKE; default: // can't be reached throw new IllegalStateException("unexpected event, must be one of (p,f,s) is:" + s.charAt(7) ); } } case 'p': return GraphicsNode.PAINTED; case 'f': return GraphicsNode.FILL; case 's': return GraphicsNode.STROKE; case 'a': return GraphicsNode.ALL; case 'n': return GraphicsNode.NONE; default: // can't be reached throw new IllegalStateException("unexpected event, must be one of (v,p,f,s,a,n) is:" + s.charAt(0) ); } } ///////////////////////////////////////////////////////////////////////// // 'enable-background' ///////////////////////////////////////////////////////////////////////// /** * Returns the subregion of user space where access to the * background image is allowed to happen. 
* * @param e the container element */ public static Rectangle2D convertEnableBackground(Element e /*, UnitProcessor.Context uctx*/) { Value v = getComputedStyle(e, SVGCSSEngine.ENABLE_BACKGROUND_INDEX); if (v.getCssValueType() != CSSValue.CSS_VALUE_LIST) { return null; // accumulate } ListValue lv = (ListValue)v; int length = lv.getLength(); switch (length) { case 1: return CompositeGraphicsNode.VIEWPORT; // new case 5: // new <x>,<y>,<width>,<height> float x = lv.item(1).getFloatValue(); float y = lv.item(2).getFloatValue(); float w = lv.item(3).getFloatValue(); float h = lv.item(4).getFloatValue(); return new Rectangle2D.Float(x, y, w, h); default: throw new IllegalStateException("Unexpected length:" + length ); // Cannot happen } } ///////////////////////////////////////////////////////////////////////// // 'color-interpolation-filters' ///////////////////////////////////////////////////////////////////////// /** * Returns the color space for the specified filter element. Checks the * 'color-interpolation-filters' property. * * @param e the element * @return true if the color space is linear, false otherwise (sRGB). */ public static boolean convertColorInterpolationFilters(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.COLOR_INTERPOLATION_FILTERS_INDEX); return CSS_LINEARRGB_VALUE == v.getStringValue(); } ///////////////////////////////////////////////////////////////////////// // 'color-interpolation' ///////////////////////////////////////////////////////////////////////// /** * Returns the color space for the specified element. Checks the * 'color-interpolation' property * * @param e the element */ public static MultipleGradientPaint.ColorSpaceEnum convertColorInterpolation(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.COLOR_INTERPOLATION_INDEX); return (CSS_LINEARRGB_VALUE == v.getStringValue()) ? 
MultipleGradientPaint.LINEAR_RGB : MultipleGradientPaint.SRGB; } ///////////////////////////////////////////////////////////////////////// // 'cursor' ///////////////////////////////////////////////////////////////////////// /** * Checks if the cursor property on the input element is set to auto */ public static boolean isAutoCursor(Element e) { Value cursorValue = CSSUtilities.getComputedStyle(e, SVGCSSEngine.CURSOR_INDEX); boolean isAuto = false; if (cursorValue != null){ if( cursorValue.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE && cursorValue.getPrimitiveType() == CSSPrimitiveValue.CSS_IDENT && cursorValue.getStringValue().charAt(0) == 'a' ) { isAuto = true; } else if ( cursorValue.getCssValueType() == CSSValue.CSS_VALUE_LIST && cursorValue.getLength() == 1) { Value lValue = cursorValue.item(0); if (lValue != null && lValue.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE && lValue.getPrimitiveType() == CSSPrimitiveValue.CSS_IDENT && lValue.getStringValue().charAt(0) == 'a') { isAuto = true; } } } return isAuto; } /** * Returns the Cursor corresponding to the input element's * cursor property * * @param e the element */ public static Cursor convertCursor(Element e, BridgeContext ctx) { return ctx.getCursorManager().convertCursor(e); } //////////////////////////////////////////////////////////////////////// // 'color-rendering', 'text-rendering', 'image-rendering', // 'shape-rendering' //////////////////////////////////////////////////////////////////////// /** * Fills the rendering hints for the specified shape element or do * nothing none has been specified. Checks the 'shape-rendering' * property. If the given RenderingHints is null, a new * RenderingHints is created. 
* * <p>Here is how the mapping between SVG rendering hints and the Java2D * rendering hints is done:</p> * * <dl> * <dt>'optimizeSpeed':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_SPEED</li> * <li>KEY_ANTIALIASING=VALUE_ANTIALIAS_OFF</li> * </ul> * </dd> * <dt>'crispEdges':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_DEFAULT</li> * <li>KEY_ANTIALIASING=VALUE_ANTIALIAS_OFF</li> * </ul> * </dd> * <dt>'geometricPrecision':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_QUALITY</li> * <li>KEY_ANTIALIASING=VALUE_ANTIALIAS_ON</li> * </ul> * </dd> * </dl> * * @param e the element * @param hints a RenderingHints to fill, or null. */ public static RenderingHints convertShapeRendering(Element e, RenderingHints hints) { Value v = getComputedStyle(e, SVGCSSEngine.SHAPE_RENDERING_INDEX); String s = v.getStringValue(); int len = s.length(); if ((len == 4) && (s.charAt(0) == 'a')) // auto return hints; if (len < 10) return hints; // Unknown. if (hints == null) hints = new RenderingHints(null); switch(s.charAt(0)) { case 'o': // optimizeSpeed hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED); hints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); break; case 'c': // crispEdges hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_DEFAULT); hints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); break; case 'g': // geometricPrecision hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); hints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); hints.put(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE); break; } return hints; } /** * Fills the rendering hints for the specified text element or do * nothing if none has been specified. If the given RenderingHints * is null, a new one is created. Checks the 'text-rendering' * property. 
* * <p>Here is how the mapping between SVG rendering hints and the Java2D * rendering hints is done:</p> * * <dl> * <dt>'optimizeSpeed':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_SPEED</li> * <li>KEY_ANTIALIASING=VALUE_ANTIALIAS_OFF</li> * <li>KEY_TEXT_ANTIALIASING=VALUE_TEXT_ANTIALIAS_OFF</li> * <li>KEY_FRACTIONALMETRICS=VALUE_FRACTIONALMETRICS_OFF</li> * </ul> * </dd> * <dt>'optimizeLegibility':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_QUALITY</li> * <li>KEY_ANTIALIASING=VALUE_ANTIALIAS_ON</li> * <li>KEY_TEXT_ANTIALIASING=VALUE_TEXT_ANTIALIAS_ON</li> * <li>KEY_FRACTIONALMETRICS=VALUE_FRACTIONALMETRICS_OFF</li> * </ul> * </dd> * <dt>'geometricPrecision':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_QUALITY</li> * <li>KEY_ANTIALIASING=VALUE_ANTIALIAS_DEFAULT</li> * <li>KEY_TEXT_ANTIALIASING=VALUE_TEXT_ANTIALIAS_DEFAULT</li> * <li>KEY_FRACTIONALMETRICS=VALUE_FRACTIONALMETRICS_ON</li> * </ul> * </dd> * </dl> * * <p>Note that for text both KEY_TEXT_ANTIALIASING and * KEY_ANTIALIASING are set as there is no guarantee that a Java2D * text rendering primitive will be used to draw text (eg. SVG * Font...).</p> * * @param e the element * @param hints a RenderingHints to fill, or null. */ public static RenderingHints convertTextRendering(Element e, RenderingHints hints) { Value v = getComputedStyle(e, SVGCSSEngine.TEXT_RENDERING_INDEX); String s = v.getStringValue(); int len = s.length(); if ((len == 4) && (s.charAt(0) == 'a')) // auto return hints; if (len < 13) return hints; // Unknown. 
if (hints == null) hints = new RenderingHints(null); switch(s.charAt(8)) { case 's': // optimizeSpeed hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED); hints.put(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_OFF); hints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); // hints.put(RenderingHints.KEY_FRACTIONALMETRICS, // RenderingHints.VALUE_FRACTIONALMETRICS_OFF); break; case 'l': // optimizeLegibility hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); hints.put(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_OFF); hints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); // hints.put(RenderingHints.KEY_FRACTIONALMETRICS, // RenderingHints.VALUE_FRACTIONALMETRICS_OFF); break; case 'c': // geometricPrecision hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); hints.put(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON); hints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); hints.put(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON); hints.put(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE); break; } return hints; } /** * Fills the rendering hints for the specified image element or do * nothing if none has been specified. If the given RenderingHints * is null, a new one is created. Checks the 'image-rendering' * property. 
* * <p>Here is how the mapping between SVG rendering hints and the Java2D * rendering hints is done:</p> * * <dl> * <dt>'optimizeSpeed':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_SPEED</li> * <li>KEY_INTERPOLATION=VALUE_INTERPOLATION_NEAREST_NEIGHBOR</li> * </ul> * </dd> * <dt>'optimizeQuality':</dt> * <dd> * <ul> * <li>KEY_RENDERING=VALUE_RENDER_QUALITY</li> * <li>KEY_INTERPOLATION=VALUE_INTERPOLATION_BICUBIC</li> * </ul> * </dd> * </dl> * * @param e the element * @param hints a RenderingHints to fill, or null. */ public static RenderingHints convertImageRendering(Element e, RenderingHints hints) { Value v = getComputedStyle(e, SVGCSSEngine.IMAGE_RENDERING_INDEX); String s = v.getStringValue(); int len = s.length(); if ((len == 4) && (s.charAt(0) == 'a')) // auto return hints; if (len < 13) return hints; // Unknown. if (hints == null) hints = new RenderingHints(null); switch(s.charAt(8)) { case 's': // optimizeSpeed hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED); hints.put(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR); break; case 'q': // optimizeQuality hints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); hints.put(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); break; } return hints; } /** * Fills the rendering hints for the specified element or do * nothing if none has been specified. If the given RenderingHints * is null, a new one is created. Checks the 'color-rendering' * property. 
* * <p>Here is how the mapping between SVG rendering hints and the Java2D * rendering hints is done:</p> * * <dl> * <dt>'optimizeSpeed':</dt> * <dd> * <ul> * <li>KEY_COLOR_RENDERING=VALUE_COLOR_RENDER_SPEED</li> * <li>KEY_ALPHA_INTERPOLATION=VALUE_ALPHA_INTERPOLATION_SPEED</li> * </ul> * </dd> * <dt>'optimizeQuality':</dt> * <dd> * <ul> * <li>KEY_COLOR_RENDERING=VALUE_COLOR_RENDER_QUALITY</li> * <li>KEY_ALPHA_INTERPOLATION=VALUE_ALPHA_INTERPOLATION_QUALITY</li> * </ul> * </dd> * </dl> * * @param e the element * @param hints a RenderingHints to fill, or null. */ public static RenderingHints convertColorRendering(Element e, RenderingHints hints) { Value v = getComputedStyle(e, SVGCSSEngine.COLOR_RENDERING_INDEX); String s = v.getStringValue(); int len = s.length(); if ((len == 4) && (s.charAt(0) == 'a')) // auto return hints; if (len < 13) return hints; // Unknown. if (hints == null) hints = new RenderingHints(null); switch(s.charAt(8)) { case 's': // optimizeSpeed hints.put(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_SPEED); hints.put(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_SPEED); break; case 'q': // optimizeQuality hints.put(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY); hints.put(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY); break; } return hints; } ///////////////////////////////////////////////////////////////////////// // 'display' ///////////////////////////////////////////////////////////////////////// /** * Returns true if the specified element has to be displayed, false * otherwise. Checks the 'display' property. 
* * @param e the element */ public static boolean convertDisplay(Element e) { if (!(e instanceof CSSStylableElement)) { return true; } Value v = getComputedStyle(e, SVGCSSEngine.DISPLAY_INDEX); return v.getStringValue().charAt(0) != 'n'; } ///////////////////////////////////////////////////////////////////////// // 'visibility' ///////////////////////////////////////////////////////////////////////// /** * Returns true if the specified element is visible, false * otherwise. Checks the 'visibility' property. * * @param e the element */ public static boolean convertVisibility(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.VISIBILITY_INDEX); return v.getStringValue().charAt(0) == 'v'; } ///////////////////////////////////////////////////////////////////////// // 'opacity' ///////////////////////////////////////////////////////////////////////// public static final Composite TRANSPARENT = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0); /** * Returns a composite object that represents the 'opacity' of the * specified element. * * @param e the element */ public static Composite convertOpacity(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.OPACITY_INDEX); float f = v.getFloatValue(); if (f <= 0f) { return TRANSPARENT; } else if (f >= 1.0f) { return AlphaComposite.SrcOver; } else { return AlphaComposite.getInstance(AlphaComposite.SRC_OVER, f); } } ///////////////////////////////////////////////////////////////////////// // 'overflow' and 'clip' ///////////////////////////////////////////////////////////////////////// /** * Returns true if the 'overflow' property indicates that an * additional clip is required, false otherwise. An additional * clip is needed if the 'overflow' property is 'scroll' or * 'hidden'. 
* * @param e the element with the 'overflow' property */ public static boolean convertOverflow(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.OVERFLOW_INDEX); String s = v.getStringValue(); return (s.charAt(0) == 'h') || (s.charAt(0) == 's'); } /** * Returns an array of floating offsets representing the 'clip' * property or null if 'auto'. The offsets are specified in the * order top, right, bottom, left. * * @param e the element with the 'clip' property */ public static float[] convertClip(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.CLIP_INDEX); int primitiveType = v.getPrimitiveType(); switch ( primitiveType ) { case CSSPrimitiveValue.CSS_RECT: float [] off = new float[4]; off[0] = v.getTop().getFloatValue(); off[1] = v.getRight().getFloatValue(); off[2] = v.getBottom().getFloatValue(); off[3] = v.getLeft().getFloatValue(); return off; case CSSPrimitiveValue.CSS_IDENT: return null; // 'auto' means no offsets default: // can't be reached throw new IllegalStateException("Unexpected primitiveType:" + primitiveType ); } } ///////////////////////////////////////////////////////////////////////// // 'filter' ///////////////////////////////////////////////////////////////////////// /** * Returns a <code>Filter</code> referenced by the specified element * and which applies on the specified graphics node. * Handle the 'filter' property. * * @param filteredElement the element that references the filter * @param filteredNode the graphics node associated to the element * to filter. 
* @param ctx the bridge context */ public static Filter convertFilter(Element filteredElement, GraphicsNode filteredNode, BridgeContext ctx) { Value v = getComputedStyle(filteredElement, SVGCSSEngine.FILTER_INDEX); int primitiveType = v.getPrimitiveType(); switch ( primitiveType ) { case CSSPrimitiveValue.CSS_IDENT: return null; // 'filter:none' case CSSPrimitiveValue.CSS_URI: String uri = v.getStringValue(); Element filter = ctx.getReferencedElement(filteredElement, uri); Bridge bridge = ctx.getBridge(filter); if (bridge == null || !(bridge instanceof FilterBridge)) { throw new BridgeException(ctx, filteredElement, ERR_CSS_URI_BAD_TARGET, new Object[] {uri}); } return ((FilterBridge)bridge).createFilter(ctx, filter, filteredElement, filteredNode); default: throw new IllegalStateException("Unexpected primitive type:" + primitiveType ); // can't be reached } } ///////////////////////////////////////////////////////////////////////// // 'clip-path' and 'clip-rule' ///////////////////////////////////////////////////////////////////////// /** * Returns a <code>Clip</code> referenced by the specified element and * which applies on the specified graphics node. * Handle the 'clip-path' property. 
* * @param clippedElement the element that references the clip * @param clippedNode the graphics node associated to the element to clip * @param ctx the bridge context */ public static ClipRable convertClipPath(Element clippedElement, GraphicsNode clippedNode, BridgeContext ctx) { Value v = getComputedStyle(clippedElement, SVGCSSEngine.CLIP_PATH_INDEX); int primitiveType = v.getPrimitiveType(); switch ( primitiveType ) { case CSSPrimitiveValue.CSS_IDENT: return null; // 'clip-path:none' case CSSPrimitiveValue.CSS_URI: String uri = v.getStringValue(); Element cp = ctx.getReferencedElement(clippedElement, uri); Bridge bridge = ctx.getBridge(cp); if (bridge == null || !(bridge instanceof ClipBridge)) { throw new BridgeException(ctx, clippedElement, ERR_CSS_URI_BAD_TARGET, new Object[] {uri}); } return ((ClipBridge)bridge).createClip(ctx, cp, clippedElement, clippedNode); default: throw new IllegalStateException("Unexpected primitive type:" + primitiveType ); // can't be reached } } /** * Returns the 'clip-rule' for the specified element. * * @param e the element interested in its a 'clip-rule' * @return GeneralPath.WIND_NON_ZERO | GeneralPath.WIND_EVEN_ODD */ public static int convertClipRule(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.CLIP_RULE_INDEX); return (v.getStringValue().charAt(0) == 'n') ? GeneralPath.WIND_NON_ZERO : GeneralPath.WIND_EVEN_ODD; } ///////////////////////////////////////////////////////////////////////// // 'mask' ///////////////////////////////////////////////////////////////////////// /** * Returns a <code>Mask</code> referenced by the specified element and * which applies on the specified graphics node. * Handle the 'mask' property. 
* * @param maskedElement the element that references the mask * @param maskedNode the graphics node associated to the element to mask * @param ctx the bridge context */ public static Mask convertMask(Element maskedElement, GraphicsNode maskedNode, BridgeContext ctx) { Value v = getComputedStyle(maskedElement, SVGCSSEngine.MASK_INDEX); int primitiveType = v.getPrimitiveType(); switch ( primitiveType ) { case CSSPrimitiveValue.CSS_IDENT: return null; // 'mask:none' case CSSPrimitiveValue.CSS_URI: String uri = v.getStringValue(); Element m = ctx.getReferencedElement(maskedElement, uri); Bridge bridge = ctx.getBridge(m); if (bridge == null || !(bridge instanceof MaskBridge)) { throw new BridgeException(ctx, maskedElement, ERR_CSS_URI_BAD_TARGET, new Object[] {uri}); } return ((MaskBridge)bridge).createMask(ctx, m, maskedElement, maskedNode); default: throw new IllegalStateException("Unexpected primitive type:" + primitiveType ); // can't be reached } } /** * Returns the 'fill-rule' for the specified element. * * @param e the element interested in its a 'fill-rule' * @return GeneralPath.WIND_NON_ZERO | GeneralPath.WIND_EVEN_ODD */ public static int convertFillRule(Element e) { Value v = getComputedStyle(e, SVGCSSEngine.FILL_RULE_INDEX); return (v.getStringValue().charAt(0) == 'n') ? GeneralPath.WIND_NON_ZERO : GeneralPath.WIND_EVEN_ODD; } ///////////////////////////////////////////////////////////////////////// // 'lighting-color' ///////////////////////////////////////////////////////////////////////// /** * Converts the color defined on the specified lighting filter element * to a <code>Color</code>. 
* * @param e the lighting filter element * @param ctx the bridge context */ public static Color convertLightingColor(Element e, BridgeContext ctx) { Value v = getComputedStyle(e, SVGCSSEngine.LIGHTING_COLOR_INDEX); if (v.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE) { return PaintServer.convertColor(v, 1); } else { return PaintServer.convertRGBICCColor (e, v.item(0), v.item(1), 1, ctx); } } ///////////////////////////////////////////////////////////////////////// // 'flood-color' and 'flood-opacity' ///////////////////////////////////////////////////////////////////////// /** * Converts the color defined on the specified &lt;feFlood&gt; * element to a <code>Color</code>. * * @param e the feFlood element * @param ctx the bridge context */ public static Color convertFloodColor(Element e, BridgeContext ctx) { Value v = getComputedStyle(e, SVGCSSEngine.FLOOD_COLOR_INDEX); Value o = getComputedStyle(e, SVGCSSEngine.FLOOD_OPACITY_INDEX); float f = PaintServer.convertOpacity(o); if (v.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE) { return PaintServer.convertColor(v, f); } else { return PaintServer.convertRGBICCColor (e, v.item(0), v.item(1), f, ctx); } } ///////////////////////////////////////////////////////////////////////// // 'stop-color' ///////////////////////////////////////////////////////////////////////// /** * Converts the color defined on the specified &lt;stop&gt; element * to a <code>Color</code>. 
* * @param e the stop element * @param opacity the paint opacity * @param ctx the bridge context to use */ public static Color convertStopColor(Element e, float opacity, BridgeContext ctx) { Value v = getComputedStyle(e, SVGCSSEngine.STOP_COLOR_INDEX); Value o = getComputedStyle(e, SVGCSSEngine.STOP_OPACITY_INDEX); opacity *= PaintServer.convertOpacity(o); if (v.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE) { return PaintServer.convertColor(v, opacity); } else { return PaintServer.convertRGBICCColor (e, v.item(0), v.item(1), opacity, ctx); } } ///////////////////////////////////////////////////////////////////////// // CSS support for <use> ///////////////////////////////////////////////////////////////////////// /** * Partially computes the style in the 'def' tree and set it in the 'use' * tree. * <p>Note: This method must be called only when 'use' has been * added to the DOM tree. * * @param refElement the referenced element * @param localRefElement the referenced element in the current document */ public static void computeStyleAndURIs(Element refElement, Element localRefElement, String uri) { // Pull fragement id off first... int idx = uri.indexOf('#'); if (idx != -1) uri = uri.substring(0,idx); // Only set xml:base if we have a real URL. if (uri.length() != 0) localRefElement.setAttributeNS(XML_NAMESPACE_URI, "base", uri); CSSEngine engine = CSSUtilities.getCSSEngine(localRefElement); CSSEngine refEngine = CSSUtilities.getCSSEngine(refElement); engine.importCascadedStyleMaps(refElement, refEngine, localRefElement); } ///////////////////////////////////////////////////////////////////////// // Additional utility methods used internally ///////////////////////////////////////////////////////////////////////// /** * Returns the winding rule represented by the specified CSSValue. 
* * @param v the value that represents the rule * @return GeneralPath.WIND_NON_ZERO | GeneralPath.WIND_EVEN_ODD */ protected static int rule(CSSValue v) { return (((CSSPrimitiveValue)v).getStringValue().charAt(0) == 'n') ? GeneralPath.WIND_NON_ZERO : GeneralPath.WIND_EVEN_ODD; } }
openjdk/jdk8
35,364
jdk/src/share/classes/javax/swing/plaf/synth/SynthTabbedPaneUI.java
/* * Copyright (c) 2002, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.swing.plaf.synth; import javax.swing.*; import javax.swing.plaf.*; import javax.swing.plaf.basic.*; import javax.swing.text.View; import java.awt.*; import java.awt.event.*; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeEvent; import sun.swing.SwingUtilities2; /** * Provides the Synth L&amp;F UI delegate for * {@link javax.swing.JTabbedPane}. * * <p>Looks up the {@code selectedTabPadInsets} property from the Style, * which represents additional insets for the selected tab. 
* * @author Scott Violet * @since 1.7 */ public class SynthTabbedPaneUI extends BasicTabbedPaneUI implements PropertyChangeListener, SynthUI { /** * <p>If non-zero, tabOverlap indicates the amount that the tab bounds * should be altered such that they would overlap with a tab on either the * leading or trailing end of a run (ie: in TOP, this would be on the left * or right).</p> * <p>A positive overlap indicates that tabs should overlap right/down, * while a negative overlap indicates tha tabs should overlap left/up.</p> * * <p>When tabOverlap is specified, it both changes the x position and width * of the tab if in TOP or BOTTOM placement, and changes the y position and * height if in LEFT or RIGHT placement.</p> * * <p>This is done for the following reason. Consider a run of 10 tabs. * There are 9 gaps between these tabs. If you specified a tabOverlap of * "-1", then each of the tabs "x" values will be shifted left. This leaves * 9 pixels of space to the right of the right-most tab unpainted. So, each * tab's width is also extended by 1 pixel to make up the difference.</p> * * <p>This property respects the RTL component orientation.</p> */ private int tabOverlap = 0; /** * When a tabbed pane has multiple rows of tabs, this indicates whether * the tabs in the upper row(s) should extend to the base of the tab area, * or whether they should remain at their normal tab height. This does not * affect the bounds of the tabs, only the bounds of area painted by the * tabs. The text position does not change. The result is that the bottom * border of the upper row of tabs becomes fully obscured by the lower tabs, * resulting in a cleaner look. 
*/ private boolean extendTabsToBase = false; private SynthContext tabAreaContext; private SynthContext tabContext; private SynthContext tabContentContext; private SynthStyle style; private SynthStyle tabStyle; private SynthStyle tabAreaStyle; private SynthStyle tabContentStyle; private Rectangle textRect = new Rectangle(); private Rectangle iconRect = new Rectangle(); private Rectangle tabAreaBounds = new Rectangle(); //added for the Nimbus look and feel, where the tab area is painted differently depending on the //state for the selected tab private boolean tabAreaStatesMatchSelectedTab = false; //added for the Nimbus LAF to ensure that the labels don't move whether the tab is selected or not private boolean nudgeSelectedLabel = true; private boolean selectedTabIsPressed = false; /** * Creates a new UI object for the given component. * * @param c component to create UI object for * @return the UI object */ public static ComponentUI createUI(JComponent c) { return new SynthTabbedPaneUI(); } private boolean scrollableTabLayoutEnabled() { return (tabPane.getTabLayoutPolicy() == JTabbedPane.SCROLL_TAB_LAYOUT); } /** * {@inheritDoc} */ @Override protected void installDefaults() { updateStyle(tabPane); } private void updateStyle(JTabbedPane c) { SynthContext context = getContext(c, ENABLED); SynthStyle oldStyle = style; style = SynthLookAndFeel.updateStyle(context, this); // Add properties other than JComponent colors, Borders and // opacity settings here: if (style != oldStyle) { tabRunOverlay = style.getInt(context, "TabbedPane.tabRunOverlay", 0); tabOverlap = style.getInt(context, "TabbedPane.tabOverlap", 0); extendTabsToBase = style.getBoolean(context, "TabbedPane.extendTabsToBase", false); textIconGap = style.getInt(context, "TabbedPane.textIconGap", 0); selectedTabPadInsets = (Insets)style.get(context, "TabbedPane.selectedTabPadInsets"); if (selectedTabPadInsets == null) { selectedTabPadInsets = new Insets(0, 0, 0, 0); } tabAreaStatesMatchSelectedTab = 
style.getBoolean(context, "TabbedPane.tabAreaStatesMatchSelectedTab", false); nudgeSelectedLabel = style.getBoolean(context, "TabbedPane.nudgeSelectedLabel", true); if (oldStyle != null) { uninstallKeyboardActions(); installKeyboardActions(); } } context.dispose(); if (tabContext != null) { tabContext.dispose(); } tabContext = getContext(c, Region.TABBED_PANE_TAB, ENABLED); this.tabStyle = SynthLookAndFeel.updateStyle(tabContext, this); tabInsets = tabStyle.getInsets(tabContext, null); if (tabAreaContext != null) { tabAreaContext.dispose(); } tabAreaContext = getContext(c, Region.TABBED_PANE_TAB_AREA, ENABLED); this.tabAreaStyle = SynthLookAndFeel.updateStyle(tabAreaContext, this); tabAreaInsets = tabAreaStyle.getInsets(tabAreaContext, null); if (tabContentContext != null) { tabContentContext.dispose(); } tabContentContext = getContext(c, Region.TABBED_PANE_CONTENT, ENABLED); this.tabContentStyle = SynthLookAndFeel.updateStyle(tabContentContext, this); contentBorderInsets = tabContentStyle.getInsets(tabContentContext, null); } /** * {@inheritDoc} */ @Override protected void installListeners() { super.installListeners(); tabPane.addPropertyChangeListener(this); } /** * {@inheritDoc} */ @Override protected void uninstallListeners() { super.uninstallListeners(); tabPane.removePropertyChangeListener(this); } /** * {@inheritDoc} */ @Override protected void uninstallDefaults() { SynthContext context = getContext(tabPane, ENABLED); style.uninstallDefaults(context); context.dispose(); style = null; tabStyle.uninstallDefaults(tabContext); tabContext.dispose(); tabContext = null; tabStyle = null; tabAreaStyle.uninstallDefaults(tabAreaContext); tabAreaContext.dispose(); tabAreaContext = null; tabAreaStyle = null; tabContentStyle.uninstallDefaults(tabContentContext); tabContentContext.dispose(); tabContentContext = null; tabContentStyle = null; } /** * {@inheritDoc} */ @Override public SynthContext getContext(JComponent c) { return getContext(c, 
SynthLookAndFeel.getComponentState(c)); } private SynthContext getContext(JComponent c, int state) { return SynthContext.getContext(SynthContext.class, c, SynthLookAndFeel.getRegion(c),style, state); } private SynthContext getContext(JComponent c, Region subregion, int state){ SynthStyle style = null; Class klass = SynthContext.class; if (subregion == Region.TABBED_PANE_TAB) { style = tabStyle; } else if (subregion == Region.TABBED_PANE_TAB_AREA) { style = tabAreaStyle; } else if (subregion == Region.TABBED_PANE_CONTENT) { style = tabContentStyle; } return SynthContext.getContext(klass, c, subregion, style, state); } /** * {@inheritDoc} */ @Override protected JButton createScrollButton(int direction) { // added for Nimbus LAF so that it can use the basic arrow buttons // UIManager is queried directly here because this is called before // updateStyle is called so the style can not be queried directly if (UIManager.getBoolean("TabbedPane.useBasicArrows")) { JButton btn = super.createScrollButton(direction); btn.setBorder(BorderFactory.createEmptyBorder()); return btn; } return new SynthScrollableTabButton(direction); } /** * {@inheritDoc} */ @Override public void propertyChange(PropertyChangeEvent e) { if (SynthLookAndFeel.shouldUpdateStyle(e)) { updateStyle(tabPane); } } /** * {@inheritDoc} * * Overridden to keep track of whether the selected tab is also pressed. 
*/ @Override protected MouseListener createMouseListener() { final MouseListener delegate = super.createMouseListener(); final MouseMotionListener delegate2 = (MouseMotionListener)delegate; return new MouseListener() { public void mouseClicked(MouseEvent e) { delegate.mouseClicked(e); } public void mouseEntered(MouseEvent e) { delegate.mouseEntered(e); } public void mouseExited(MouseEvent e) { delegate.mouseExited(e); } public void mousePressed(MouseEvent e) { if (!tabPane.isEnabled()) { return; } int tabIndex = tabForCoordinate(tabPane, e.getX(), e.getY()); if (tabIndex >= 0 && tabPane.isEnabledAt(tabIndex)) { if (tabIndex == tabPane.getSelectedIndex()) { // Clicking on selected tab selectedTabIsPressed = true; //TODO need to just repaint the tab area! tabPane.repaint(); } } //forward the event (this will set the selected index, or none at all delegate.mousePressed(e); } public void mouseReleased(MouseEvent e) { if (selectedTabIsPressed) { selectedTabIsPressed = false; //TODO need to just repaint the tab area! tabPane.repaint(); } //forward the event delegate.mouseReleased(e); //hack: The super method *should* be setting the mouse-over property correctly //here, but it doesn't. That is, when the mouse is released, whatever tab is below the //released mouse should be in rollover state. But, if you select a tab and don't //move the mouse, this doesn't happen. Hence, forwarding the event. delegate2.mouseMoved(e); } }; } /** * {@inheritDoc} */ @Override protected int getTabLabelShiftX(int tabPlacement, int tabIndex, boolean isSelected) { if (nudgeSelectedLabel) { return super.getTabLabelShiftX(tabPlacement, tabIndex, isSelected); } else { return 0; } } /** * {@inheritDoc} */ @Override protected int getTabLabelShiftY(int tabPlacement, int tabIndex, boolean isSelected) { if (nudgeSelectedLabel) { return super.getTabLabelShiftY(tabPlacement, tabIndex, isSelected); } else { return 0; } } /** * Notifies this UI delegate to repaint the specified component. 
     * This method paints the component background, then calls
     * the {@link #paint(SynthContext,Graphics)} method.
     *
     * <p>In general, this method does not need to be overridden by subclasses.
     * All Look and Feel rendering code should reside in the {@code paint} method.
     *
     * @param g the {@code Graphics} object used for painting
     * @param c the component being painted
     * @see #paint(SynthContext,Graphics)
     */
    @Override
    public void update(Graphics g, JComponent c) {
        SynthContext context = getContext(c);

        SynthLookAndFeel.update(context, g);
        // Fill the whole component with the tabbed-pane background before
        // delegating the foreground work to paint(SynthContext, Graphics).
        context.getPainter().paintTabbedPaneBackground(context,
                          g, 0, 0, c.getWidth(), c.getHeight());
        paint(context, g);
        context.dispose();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected int getBaseline(int tab) {
        // A custom tab component or an HTML title computes its own baseline;
        // defer to the superclass in those cases.
        if (tabPane.getTabComponentAt(tab) != null ||
                getTextViewForTab(tab) != null) {
            return super.getBaseline(tab);
        }
        String title = tabPane.getTitleAt(tab);
        Font font = tabContext.getStyle().getFont(tabContext);
        FontMetrics metrics = getFontMetrics(font);
        Icon icon = getIconForTab(tab);
        textRect.setBounds(0, 0, 0, 0);
        iconRect.setBounds(0, 0, 0, 0);
        calcRect.setBounds(0, 0, Short.MAX_VALUE, maxTabHeight);
        // Lay out the title/icon the same way painting does, then read the
        // baseline off the resulting text rectangle.
        tabContext.getStyle().getGraphicsUtils(tabContext).layoutText(
                tabContext, metrics, title, icon, SwingUtilities.CENTER,
                SwingUtilities.CENTER, SwingUtilities.LEADING,
                SwingUtilities.CENTER, calcRect,
                iconRect, textRect, textIconGap);
        return textRect.y + metrics.getAscent() + getBaselineOffset();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void paintBorder(SynthContext context, Graphics g, int x,
                            int y, int w, int h) {
        // Delegate border painting entirely to the Synth painter.
        context.getPainter().paintTabbedPaneBorder(context, g, x, y, w, h);
    }

    /**
     * Paints the specified component according to the Look and Feel.
     * <p>This method is not used by Synth Look and Feel.
     * Painting is handled by the {@link #paint(SynthContext,Graphics)} method.
* * @param g the {@code Graphics} object used for painting * @param c the component being painted * @see #paint(SynthContext,Graphics) */ @Override public void paint(Graphics g, JComponent c) { SynthContext context = getContext(c); paint(context, g); context.dispose(); } /** * Paints the specified component. * * @param context context for the component being painted * @param g the {@code Graphics} object used for painting * @see #update(Graphics,JComponent) */ protected void paint(SynthContext context, Graphics g) { int selectedIndex = tabPane.getSelectedIndex(); int tabPlacement = tabPane.getTabPlacement(); ensureCurrentLayout(); // Paint tab area // If scrollable tabs are enabled, the tab area will be // painted by the scrollable tab panel instead. // if (!scrollableTabLayoutEnabled()) { // WRAP_TAB_LAYOUT Insets insets = tabPane.getInsets(); int x = insets.left; int y = insets.top; int width = tabPane.getWidth() - insets.left - insets.right; int height = tabPane.getHeight() - insets.top - insets.bottom; int size; switch(tabPlacement) { case LEFT: width = calculateTabAreaWidth(tabPlacement, runCount, maxTabWidth); break; case RIGHT: size = calculateTabAreaWidth(tabPlacement, runCount, maxTabWidth); x = x + width - size; width = size; break; case BOTTOM: size = calculateTabAreaHeight(tabPlacement, runCount, maxTabHeight); y = y + height - size; height = size; break; case TOP: default: height = calculateTabAreaHeight(tabPlacement, runCount, maxTabHeight); } tabAreaBounds.setBounds(x, y, width, height); if (g.getClipBounds().intersects(tabAreaBounds)) { paintTabArea(tabAreaContext, g, tabPlacement, selectedIndex, tabAreaBounds); } } // Paint content border paintContentBorder(tabContentContext, g, tabPlacement, selectedIndex); } protected void paintTabArea(Graphics g, int tabPlacement, int selectedIndex) { // This can be invoked from ScrollabeTabPanel Insets insets = tabPane.getInsets(); int x = insets.left; int y = insets.top; int width = tabPane.getWidth() - 
insets.left - insets.right; int height = tabPane.getHeight() - insets.top - insets.bottom; paintTabArea(tabAreaContext, g, tabPlacement, selectedIndex, new Rectangle(x, y, width, height)); } private void paintTabArea(SynthContext ss, Graphics g, int tabPlacement, int selectedIndex, Rectangle tabAreaBounds) { Rectangle clipRect = g.getClipBounds(); //if the tab area's states should match that of the selected tab, then //first update the selected tab's states, then set the state //for the tab area to match //otherwise, restore the tab area's state to ENABLED (which is the //only supported state otherwise). if (tabAreaStatesMatchSelectedTab && selectedIndex >= 0) { updateTabContext(selectedIndex, true, selectedTabIsPressed, (getRolloverTab() == selectedIndex), (getFocusIndex() == selectedIndex)); ss.setComponentState(tabContext.getComponentState()); } else { ss.setComponentState(SynthConstants.ENABLED); } // Paint the tab area. SynthLookAndFeel.updateSubregion(ss, g, tabAreaBounds); ss.getPainter().paintTabbedPaneTabAreaBackground(ss, g, tabAreaBounds.x, tabAreaBounds.y, tabAreaBounds.width, tabAreaBounds.height, tabPlacement); ss.getPainter().paintTabbedPaneTabAreaBorder(ss, g, tabAreaBounds.x, tabAreaBounds.y, tabAreaBounds.width, tabAreaBounds.height, tabPlacement); int tabCount = tabPane.getTabCount(); iconRect.setBounds(0, 0, 0, 0); textRect.setBounds(0, 0, 0, 0); // Paint tabRuns of tabs from back to front for (int i = runCount - 1; i >= 0; i--) { int start = tabRuns[i]; int next = tabRuns[(i == runCount - 1)? 0 : i + 1]; int end = (next != 0? 
next - 1: tabCount - 1); for (int j = start; j <= end; j++) { if (rects[j].intersects(clipRect) && selectedIndex != j) { paintTab(tabContext, g, tabPlacement, rects, j, iconRect, textRect); } } } if (selectedIndex >= 0) { if (rects[selectedIndex].intersects(clipRect)) { paintTab(tabContext, g, tabPlacement, rects, selectedIndex, iconRect, textRect); } } } /** * {@inheritDoc} */ @Override protected void setRolloverTab(int index) { int oldRolloverTab = getRolloverTab(); super.setRolloverTab(index); Rectangle r = null; if (oldRolloverTab != index && tabAreaStatesMatchSelectedTab) { //TODO need to just repaint the tab area! tabPane.repaint(); } else { if ((oldRolloverTab >= 0) && (oldRolloverTab < tabPane.getTabCount())) { r = getTabBounds(tabPane, oldRolloverTab); if (r != null) { tabPane.repaint(r); } } if (index >= 0) { r = getTabBounds(tabPane, index); if (r != null) { tabPane.repaint(r); } } } } private void paintTab(SynthContext ss, Graphics g, int tabPlacement, Rectangle[] rects, int tabIndex, Rectangle iconRect, Rectangle textRect) { Rectangle tabRect = rects[tabIndex]; int selectedIndex = tabPane.getSelectedIndex(); boolean isSelected = selectedIndex == tabIndex; updateTabContext(tabIndex, isSelected, isSelected && selectedTabIsPressed, (getRolloverTab() == tabIndex), (getFocusIndex() == tabIndex)); SynthLookAndFeel.updateSubregion(ss, g, tabRect); int x = tabRect.x; int y = tabRect.y; int height = tabRect.height; int width = tabRect.width; int placement = tabPane.getTabPlacement(); if (extendTabsToBase && runCount > 1) { //paint this tab such that its edge closest to the base is equal to //edge of the selected tab closest to the base. In terms of the TOP //tab placement, this will cause the bottom of each tab to be //painted even with the bottom of the selected tab. This is because //in each tab placement (TOP, LEFT, BOTTOM, RIGHT) the selected tab //is closest to the base. 
// NOTE(review): this excerpt is the interior of a Synth L&F tabbed-pane UI
// delegate. The enclosing class declaration and the beginning of the
// tab-painting method lie BEFORE this excerpt, so the fragment immediately
// below is the tail of that method.

// Tail of the tab-painting method: when some tab is selected, extend the
// rectangle being painted toward the selected tab's edge so the painted
// background/border visually joins the content area on that side.
if (selectedIndex >= 0) {
    Rectangle r = rects[selectedIndex];
    switch (placement) {
        case TOP:
            int bottomY = r.y + r.height;
            height = bottomY - tabRect.y;
            break;
        case LEFT:
            int rightX = r.x + r.width;
            width = rightX - tabRect.x;
            break;
        case BOTTOM:
            int topY = r.y;
            height = (tabRect.y + tabRect.height) - topY;
            y = topY;
            break;
        case RIGHT:
            int leftX = r.x;
            width = (tabRect.x + tabRect.width) - leftX;
            x = leftX;
            break;
    }
}
}
// Delegate background and border rendering to the Synth painter, then — only
// when the tab has no custom tab component installed — lay out and paint the
// title text and icon ourselves.
tabContext.getPainter().paintTabbedPaneTabBackground(tabContext, g, x, y,
        width, height, tabIndex, placement);
tabContext.getPainter().paintTabbedPaneTabBorder(tabContext, g, x, y,
        width, height, tabIndex, placement);
if (tabPane.getTabComponentAt(tabIndex) == null) {
    String title = tabPane.getTitleAt(tabIndex);
    Font font = ss.getStyle().getFont(ss);
    FontMetrics metrics = SwingUtilities2.getFontMetrics(tabPane, g, font);
    Icon icon = getIconForTab(tabIndex);
    layoutLabel(ss, tabPlacement, metrics, tabIndex, title, icon,
            tabRect, iconRect, textRect, isSelected);
    paintText(ss, g, tabPlacement, font, metrics, tabIndex, title,
            textRect, isSelected);
    paintIcon(g, tabPlacement, tabIndex, icon, iconRect, isSelected);
}
}

/**
 * Computes the icon and text bounds for one tab inside {@code tabRect},
 * writing the results into {@code iconRect} and {@code textRect}, then
 * applies the placement-dependent x/y "nudge" used for selected tabs.
 * The "html" client property is temporarily installed so the Synth text
 * layout can honor an HTML title, and cleared again afterwards.
 */
private void layoutLabel(SynthContext ss, int tabPlacement,
                         FontMetrics metrics, int tabIndex,
                         String title, Icon icon,
                         Rectangle tabRect, Rectangle iconRect,
                         Rectangle textRect, boolean isSelected ) {
    View v = getTextViewForTab(tabIndex);
    if (v != null) {
        tabPane.putClientProperty("html", v);
    }

    // Reset output rectangles before layout computes fresh values.
    textRect.x = textRect.y = iconRect.x = iconRect.y = 0;

    ss.getStyle().getGraphicsUtils(ss).layoutText(ss, metrics, title,
            icon, SwingUtilities.CENTER, SwingUtilities.CENTER,
            SwingUtilities.LEADING, SwingUtilities.CENTER,
            tabRect, iconRect, textRect, textIconGap);

    tabPane.putClientProperty("html", null);

    // Shift both rectangles by the per-placement label offset.
    int xNudge = getTabLabelShiftX(tabPlacement, tabIndex, isSelected);
    int yNudge = getTabLabelShiftY(tabPlacement, tabIndex, isSelected);
    iconRect.x += xNudge;
    iconRect.y += yNudge;
    textRect.x += xNudge;
    textRect.y += yNudge;
}

/**
 * Paints one tab's title into {@code textRect}: an HTML title is rendered
 * through its View, otherwise plain text is painted in the style's
 * TEXT_FOREGROUND color with the tab's displayed-mnemonic underline.
 */
private void paintText(SynthContext ss,
                       Graphics g, int tabPlacement,
                       Font font, FontMetrics metrics, int tabIndex,
                       String title, Rectangle textRect,
                       boolean isSelected) {
    g.setFont(font);

    View v = getTextViewForTab(tabIndex);
    if (v != null) {
        // html
        v.paint(g, textRect);
    } else {
        // plain text
        int mnemIndex = tabPane.getDisplayedMnemonicIndexAt(tabIndex);

        g.setColor(ss.getStyle().getColor(ss, ColorType.TEXT_FOREGROUND));
        ss.getStyle().getGraphicsUtils(ss).paintText(ss, g, title,
                textRect, mnemIndex);
    }
}

/**
 * Paints the content area's background and border. The content rectangle
 * starts as the tabbed pane's inner bounds and is shrunk on the side where
 * the tab run area sits (depending on tab placement).
 */
private void paintContentBorder(SynthContext ss, Graphics g,
                                int tabPlacement, int selectedIndex) {
    int width = tabPane.getWidth();
    int height = tabPane.getHeight();
    Insets insets = tabPane.getInsets();

    int x = insets.left;
    int y = insets.top;
    int w = width - insets.right - insets.left;
    int h = height - insets.top - insets.bottom;

    // Remove the tab-run strip from the content rectangle.
    switch(tabPlacement) {
        case LEFT:
            x += calculateTabAreaWidth(tabPlacement, runCount, maxTabWidth);
            w -= (x - insets.left);
            break;
        case RIGHT:
            w -= calculateTabAreaWidth(tabPlacement, runCount, maxTabWidth);
            break;
        case BOTTOM:
            h -= calculateTabAreaHeight(tabPlacement, runCount, maxTabHeight);
            break;
        case TOP:
        default:
            y += calculateTabAreaHeight(tabPlacement, runCount, maxTabHeight);
            h -= (y - insets.top);
    }
    SynthLookAndFeel.updateSubregion(ss, g, new Rectangle(x, y, w, h));
    ss.getPainter().paintTabbedPaneContentBackground(ss, g, x, y, w, h);
    ss.getPainter().paintTabbedPaneContentBorder(ss, g, x, y, w, h);
}

/**
 * Makes sure the tab layout information is up to date before it is read,
 * validating the pane and falling back to a direct layout calculation when
 * validation cannot run yet (no peer).
 */
private void ensureCurrentLayout() {
    if (!tabPane.isValid()) {
        tabPane.validate();
    }
    /* If tabPane doesn't have a peer yet, the validate() call will
     * silently fail. We handle that by forcing a layout if tabPane
     * is still invalid. See bug 4237677.
     */
    if (!tabPane.isValid()) {
        TabbedPaneLayout layout = (TabbedPaneLayout)tabPane.getLayout();
        layout.calculateLayoutInfo();
    }
}

/**
 * {@inheritDoc}
 */
@Override
protected int calculateMaxTabHeight(int tabPlacement) {
    // Uses the Synth style's font (not the component font) for measurement.
    FontMetrics metrics = getFontMetrics(tabContext.getStyle().getFont(
            tabContext));
    int tabCount = tabPane.getTabCount();
    int result = 0;
    int fontHeight = metrics.getHeight();
    for(int i = 0; i < tabCount; i++) {
        result = Math.max(calculateTabHeight(tabPlacement, i, fontHeight), result);
    }
    return result;
}

/**
 * {@inheritDoc}
 */
@Override
protected int calculateTabWidth(int tabPlacement, int tabIndex,
                                FontMetrics metrics) {
    Icon icon = getIconForTab(tabIndex);
    Insets tabInsets = getTabInsets(tabPlacement, tabIndex);
    int width = tabInsets.left + tabInsets.right;
    Component tabComponent = tabPane.getTabComponentAt(tabIndex);
    if (tabComponent != null) {
        // A custom tab component dictates the width by itself.
        width += tabComponent.getPreferredSize().width;
    } else {
        if (icon != null) {
            width += icon.getIconWidth() + textIconGap;
        }
        View v = getTextViewForTab(tabIndex);
        if (v != null) {
            // html
            width += (int) v.getPreferredSpan(View.X_AXIS);
        } else {
            // plain text
            String title = tabPane.getTitleAt(tabIndex);
            width += tabContext.getStyle().getGraphicsUtils(tabContext).
                    computeStringWidth(tabContext, metrics.getFont(),
                            metrics, title);
        }
    }
    return width;
}

/**
 * {@inheritDoc}
 */
@Override
protected int calculateMaxTabWidth(int tabPlacement) {
    FontMetrics metrics = getFontMetrics(tabContext.getStyle().getFont(
            tabContext));
    int tabCount = tabPane.getTabCount();
    int result = 0;
    for(int i = 0; i < tabCount; i++) {
        result = Math.max(calculateTabWidth(tabPlacement, i, metrics), result);
    }
    return result;
}

/**
 * {@inheritDoc}
 */
@Override
protected Insets getTabInsets(int tabPlacement, int tabIndex) {
    // Side effect: refreshes tabContext's component state for this tab so
    // the returned insets match the tab's current (focused/unfocused) state.
    updateTabContext(tabIndex, false, false, false,
            (getFocusIndex() == tabIndex));
    return tabInsets;
}

/**
 * {@inheritDoc}
 */
@Override
protected FontMetrics getFontMetrics() {
    return getFontMetrics(tabContext.getStyle().getFont(tabContext));
}

// Metrics for an explicit font, resolved through the tabbed pane.
private FontMetrics getFontMetrics(Font font) {
    return tabPane.getFontMetrics(font);
}

/**
 * Recomputes the SynthConstants state bit mask for one tab from its
 * enabled/selected/rollover/pressed/focus inputs and installs it on
 * {@code tabContext}.
 */
private void updateTabContext(int index, boolean selected,
                              boolean isMouseDown, boolean isMouseOver,
                              boolean hasFocus) {
    int state = 0;
    if (!tabPane.isEnabled() || !tabPane.isEnabledAt(index)) {
        state |= SynthConstants.DISABLED;
        if (selected) {
            state |= SynthConstants.SELECTED;
        }
    } else if (selected) {
        state |= (SynthConstants.ENABLED | SynthConstants.SELECTED);
        // Rollover on the selected tab only when the L&F opts in.
        if (isMouseOver && UIManager.getBoolean("TabbedPane.isTabRollover")) {
            state |= SynthConstants.MOUSE_OVER;
        }
    } else if (isMouseOver) {
        state |= (SynthConstants.ENABLED | SynthConstants.MOUSE_OVER);
    } else {
        state = SynthLookAndFeel.getComponentState(tabPane);
        state &= ~SynthConstants.FOCUSED; // don't use tabbedpane focus state
    }
    if (hasFocus && tabPane.hasFocus()) {
        state |= SynthConstants.FOCUSED; // individual tab has focus
    }
    if (isMouseDown) {
        state |= SynthConstants.PRESSED;
    }

    tabContext.setComponentState(state);
}

/**
 * {@inheritDoc}
 *
 * Overridden to create a TabbedPaneLayout subclass which takes into
 * account tabOverlap.
 */
@Override
protected LayoutManager createLayoutManager() {
    if (tabPane.getTabLayoutPolicy() == JTabbedPane.SCROLL_TAB_LAYOUT) {
        return super.createLayoutManager();
    } else { /* WRAP_TAB_LAYOUT */
        return new TabbedPaneLayout() {
            @Override
            public void calculateLayoutInfo() {
                super.calculateLayoutInfo();
                //shift all the tabs, if necessary
                if (tabOverlap != 0) {
                    int tabCount = tabPane.getTabCount();
                    //left-to-right/right-to-left only affects layout
                    //when placement is TOP or BOTTOM
                    boolean ltr = tabPane.getComponentOrientation().isLeftToRight();
                    // Walk the runs back-to-front; within a run, every tab
                    // after the first is shifted so it overlaps its neighbor.
                    for (int i = runCount - 1; i >= 0; i--) {
                        int start = tabRuns[i];
                        int next = tabRuns[(i == runCount - 1)? 0 : i + 1];
                        int end = (next != 0? next - 1: tabCount - 1);
                        for (int j = start+1; j <= end; j++) {
                            // xshift and yshift represent the amount &
                            // direction to shift the tab in their
                            // respective axis.
                            int xshift = 0;
                            int yshift = 0;
                            // configure xshift and y shift based on tab
                            // position and ltr/rtl
                            switch (tabPane.getTabPlacement()) {
                                case JTabbedPane.TOP:
                                case JTabbedPane.BOTTOM:
                                    xshift = ltr ? tabOverlap : -tabOverlap;
                                    break;
                                case JTabbedPane.LEFT:
                                case JTabbedPane.RIGHT:
                                    yshift = tabOverlap;
                                    break;
                                default: //do nothing
                            }
                            rects[j].x += xshift;
                            rects[j].y += yshift;
                            // Grow the rect so the overlapped region still
                            // belongs to (and is painted by) this tab.
                            rects[j].width += Math.abs(xshift);
                            rects[j].height += Math.abs(yshift);
                        }
                    }
                }
            }
        };
    }
}

// Arrow button used for the scroll-tab-layout forward/backward controls;
// named so the Synth style for "TabbedPane.button" applies to it.
private class SynthScrollableTabButton extends SynthArrowButton implements
        UIResource {
    public SynthScrollableTabButton(int direction) {
        super(direction);
        setName("TabbedPane.button");
    }
}
}
googleapis/google-cloud-java
35,078
java-area120-tables/proto-google-area120-tables-v1alpha1/src/main/java/com/google/area120/tables/v1alpha1/ListRowsResponse.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/area120/tables/v1alpha1/tables.proto
// Protobuf Java Version: 3.25.8
// NOTE(review): generated protobuf code — any change here must be made in the
// .proto source and regenerated; only comments were touched in this review.
package com.google.area120.tables.v1alpha1;

/**
 * Response message for TablesService.ListRows.
 *
 * <p>Protobuf type {@code google.area120.tables.v1alpha1.ListRowsResponse}.
 * Immutable message with two fields: {@code repeated Row rows = 1} and
 * {@code string next_page_token = 2}.
 */
public final class ListRowsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.area120.tables.v1alpha1.ListRowsResponse)
    ListRowsResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListRowsResponse.newBuilder() to construct.
  private ListRowsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance constructor: empty rows list, empty page token.
  private ListRowsResponse() {
    rows_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListRowsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.area120.tables.v1alpha1.TablesProto
        .internal_static_google_area120_tables_v1alpha1_ListRowsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.area120.tables.v1alpha1.TablesProto
        .internal_static_google_area120_tables_v1alpha1_ListRowsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.area120.tables.v1alpha1.ListRowsResponse.class,
            com.google.area120.tables.v1alpha1.ListRowsResponse.Builder.class);
  }

  public static final int ROWS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.area120.tables.v1alpha1.Row> rows_;

  /**
   * The rows from the specified table.
   * {@code repeated .google.area120.tables.v1alpha1.Row rows = 1;}
   */
  @java.lang.Override
  public java.util.List<com.google.area120.tables.v1alpha1.Row> getRowsList() {
    return rows_;
  }

  /** OrBuilder view of {@code rows}; same backing list on an immutable message. */
  @java.lang.Override
  public java.util.List<? extends com.google.area120.tables.v1alpha1.RowOrBuilder>
      getRowsOrBuilderList() {
    return rows_;
  }

  /** Number of elements in {@code rows}. */
  @java.lang.Override
  public int getRowsCount() {
    return rows_.size();
  }

  /** Element of {@code rows} at {@code index}. */
  @java.lang.Override
  public com.google.area120.tables.v1alpha1.Row getRows(int index) {
    return rows_.get(index);
  }

  /** OrBuilder element of {@code rows} at {@code index}. */
  @java.lang.Override
  public com.google.area120.tables.v1alpha1.RowOrBuilder getRowsOrBuilder(int index) {
    return rows_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; lazily converted and cached below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is empty, there are no subsequent pages.
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s; // cache the decoded String
      return s;
    }
  }

  /**
   * UTF-8 bytes of the next-page token.
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b; // cache the encoded ByteString
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes fields in field-number order; empty token is omitted per proto3.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < rows_.size(); i++) {
      output.writeMessage(1, rows_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < rows_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, rows_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size; // cache; invalidated only by building a new message
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.area120.tables.v1alpha1.ListRowsResponse)) {
      return super.equals(obj);
    }
    com.google.area120.tables.v1alpha1.ListRowsResponse other =
        (com.google.area120.tables.v1alpha1.ListRowsResponse) obj;

    if (!getRowsList().equals(other.getRowsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getRowsCount() > 0) {
      hash = (37 * hash) + ROWS_FIELD_NUMBER;
      hash = (53 * hash) + getRowsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Static parse entry points (delegate to PARSER / GeneratedMessageV3). ----

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.area120.tables.v1alpha1.ListRowsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for {@code google.area120.tables.v1alpha1.ListRowsResponse}.
   *
   * <p>Mutable; {@code bitField0_} tracks which fields have been set
   * (bit 0 = rows list is mutable, bit 1 = nextPageToken set).
   */
  public static final class Builder
      extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.area120.tables.v1alpha1.ListRowsResponse)
      com.google.area120.tables.v1alpha1.ListRowsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.area120.tables.v1alpha1.TablesProto
          .internal_static_google_area120_tables_v1alpha1_ListRowsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.area120.tables.v1alpha1.TablesProto
          .internal_static_google_area120_tables_v1alpha1_ListRowsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.area120.tables.v1alpha1.ListRowsResponse.class,
              com.google.area120.tables.v1alpha1.ListRowsResponse.Builder.class);
    }

    // Construct using com.google.area120.tables.v1alpha1.ListRowsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (rowsBuilder_ == null) {
        rows_ = java.util.Collections.emptyList();
      } else {
        rows_ = null;
        rowsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.area120.tables.v1alpha1.TablesProto
          .internal_static_google_area120_tables_v1alpha1_ListRowsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.area120.tables.v1alpha1.ListRowsResponse getDefaultInstanceForType() {
      return com.google.area120.tables.v1alpha1.ListRowsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.area120.tables.v1alpha1.ListRowsResponse build() {
      com.google.area120.tables.v1alpha1.ListRowsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.area120.tables.v1alpha1.ListRowsResponse buildPartial() {
      com.google.area120.tables.v1alpha1.ListRowsResponse result =
          new com.google.area120.tables.v1alpha1.ListRowsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the rows list into the result, freezing the local list
    // (unmodifiable) when there is no field builder managing it.
    private void buildPartialRepeatedFields(
        com.google.area120.tables.v1alpha1.ListRowsResponse result) {
      if (rowsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          rows_ = java.util.Collections.unmodifiableList(rows_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.rows_ = rows_;
      } else {
        result.rows_ = rowsBuilder_.build();
      }
    }

    // Transfers scalar fields whose has-bits are set.
    private void buildPartial0(com.google.area120.tables.v1alpha1.ListRowsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.area120.tables.v1alpha1.ListRowsResponse) {
        return mergeFrom((com.google.area120.tables.v1alpha1.ListRowsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.area120.tables.v1alpha1.ListRowsResponse other) {
      if (other == com.google.area120.tables.v1alpha1.ListRowsResponse.getDefaultInstance())
        return this;
      if (rowsBuilder_ == null) {
        if (!other.rows_.isEmpty()) {
          if (rows_.isEmpty()) {
            // Adopt the other message's (immutable) list directly.
            rows_ = other.rows_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRowsIsMutable();
            rows_.addAll(other.rows_);
          }
          onChanged();
        }
      } else {
        if (!other.rows_.isEmpty()) {
          if (rowsBuilder_.isEmpty()) {
            // Switch back to list mode, re-creating the builder lazily.
            rowsBuilder_.dispose();
            rowsBuilder_ = null;
            rows_ = other.rows_;
            bitField0_ = (bitField0_ & ~0x00000001);
            rowsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRowsFieldBuilder()
                    : null;
          } else {
            rowsBuilder_.addAllMessages(other.rows_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: reads tag/value pairs until EOF or an end-group tag.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.area120.tables.v1alpha1.Row m =
                    input.readMessage(
                        com.google.area120.tables.v1alpha1.Row.parser(), extensionRegistry);
                if (rowsBuilder_ == null) {
                  ensureRowsIsMutable();
                  rows_.add(m);
                } else {
                  rowsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.area120.tables.v1alpha1.Row> rows_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replace the shared/immutable list with an ArrayList
    // before the first mutation.
    private void ensureRowsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        rows_ = new java.util.ArrayList<com.google.area120.tables.v1alpha1.Row>(rows_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.area120.tables.v1alpha1.Row,
            com.google.area120.tables.v1alpha1.Row.Builder,
            com.google.area120.tables.v1alpha1.RowOrBuilder>
        rowsBuilder_;

    /** The rows from the specified table ({@code repeated Row rows = 1}). */
    public java.util.List<com.google.area120.tables.v1alpha1.Row> getRowsList() {
      if (rowsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(rows_);
      } else {
        return rowsBuilder_.getMessageList();
      }
    }

    /** Number of elements in {@code rows}. */
    public int getRowsCount() {
      if (rowsBuilder_ == null) {
        return rows_.size();
      } else {
        return rowsBuilder_.getCount();
      }
    }

    /** Element of {@code rows} at {@code index}. */
    public com.google.area120.tables.v1alpha1.Row getRows(int index) {
      if (rowsBuilder_ == null) {
        return rows_.get(index);
      } else {
        return rowsBuilder_.getMessage(index);
      }
    }

    /** Replaces the element of {@code rows} at {@code index}. */
    public Builder setRows(int index, com.google.area120.tables.v1alpha1.Row value) {
      if (rowsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRowsIsMutable();
        rows_.set(index, value);
        onChanged();
      } else {
        rowsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /** Replaces the element of {@code rows} at {@code index} from a builder. */
    public Builder setRows(
        int index, com.google.area120.tables.v1alpha1.Row.Builder builderForValue) {
      if (rowsBuilder_ == null) {
        ensureRowsIsMutable();
        rows_.set(index, builderForValue.build());
        onChanged();
      } else {
        rowsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /** Appends to {@code rows}. */
    public Builder addRows(com.google.area120.tables.v1alpha1.Row value) {
      if (rowsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRowsIsMutable();
        rows_.add(value);
        onChanged();
      } else {
        rowsBuilder_.addMessage(value);
      }
      return this;
    }

    /** Inserts into {@code rows} at {@code index}. */
    public Builder addRows(int index, com.google.area120.tables.v1alpha1.Row value) {
      if (rowsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRowsIsMutable();
        rows_.add(index, value);
        onChanged();
      } else {
        rowsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /** Appends to {@code rows} from a builder. */
    public Builder addRows(com.google.area120.tables.v1alpha1.Row.Builder builderForValue) {
      if (rowsBuilder_ == null) {
        ensureRowsIsMutable();
        rows_.add(builderForValue.build());
        onChanged();
      } else {
        rowsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /** Inserts into {@code rows} at {@code index} from a builder. */
    public Builder addRows(
        int index, com.google.area120.tables.v1alpha1.Row.Builder builderForValue) {
      if (rowsBuilder_ == null) {
        ensureRowsIsMutable();
        rows_.add(index, builderForValue.build());
        onChanged();
      } else {
        rowsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /** Appends all of {@code values} to {@code rows}. */
    public Builder addAllRows(
        java.lang.Iterable<? extends com.google.area120.tables.v1alpha1.Row> values) {
      if (rowsBuilder_ == null) {
        ensureRowsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, rows_);
        onChanged();
      } else {
        rowsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /** Clears {@code rows}. */
    public Builder clearRows() {
      if (rowsBuilder_ == null) {
        rows_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        rowsBuilder_.clear();
      }
      return this;
    }

    /** Removes the element of {@code rows} at {@code index}. */
    public Builder removeRows(int index) {
      if (rowsBuilder_ == null) {
        ensureRowsIsMutable();
        rows_.remove(index);
        onChanged();
      } else {
        rowsBuilder_.remove(index);
      }
      return this;
    }

    /** Mutable builder for the element of {@code rows} at {@code index}. */
    public com.google.area120.tables.v1alpha1.Row.Builder getRowsBuilder(int index) {
      return getRowsFieldBuilder().getBuilder(index);
    }

    /** OrBuilder view of the element of {@code rows} at {@code index}. */
    public com.google.area120.tables.v1alpha1.RowOrBuilder getRowsOrBuilder(int index) {
      if (rowsBuilder_ == null) {
        return rows_.get(index);
      } else {
        return rowsBuilder_.getMessageOrBuilder(index);
      }
    }

    /** OrBuilder view of the whole {@code rows} list. */
    public java.util.List<? extends com.google.area120.tables.v1alpha1.RowOrBuilder>
        getRowsOrBuilderList() {
      if (rowsBuilder_ != null) {
        return rowsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(rows_);
      }
    }

    /** Appends a default-instance builder to {@code rows}. */
    public com.google.area120.tables.v1alpha1.Row.Builder addRowsBuilder() {
      return getRowsFieldBuilder()
          .addBuilder(com.google.area120.tables.v1alpha1.Row.getDefaultInstance());
    }

    /** Inserts a default-instance builder into {@code rows} at {@code index}. */
    public com.google.area120.tables.v1alpha1.Row.Builder addRowsBuilder(int index) {
      return getRowsFieldBuilder()
          .addBuilder(index, com.google.area120.tables.v1alpha1.Row.getDefaultInstance());
    }

    /** List of element builders for {@code rows}. */
    public java.util.List<com.google.area120.tables.v1alpha1.Row.Builder> getRowsBuilderList() {
      return getRowsFieldBuilder().getBuilderList();
    }

    // Lazily creates the repeated-field builder; ownership of rows_ moves
    // into it (rows_ is nulled out afterwards).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.area120.tables.v1alpha1.Row,
            com.google.area120.tables.v1alpha1.Row.Builder,
            com.google.area120.tables.v1alpha1.RowOrBuilder>
        getRowsFieldBuilder() {
      if (rowsBuilder_ == null) {
        rowsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.area120.tables.v1alpha1.Row,
                com.google.area120.tables.v1alpha1.Row.Builder,
                com.google.area120.tables.v1alpha1.RowOrBuilder>(
                rows_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        rows_ = null;
      }
      return rowsBuilder_;
    }

    // String-or-ByteString holder, as in the message class.
    private java.lang.Object nextPageToken_ = "";

    /**
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is empty, there are no subsequent pages.
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of the next-page token.
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the next-page token.
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Resets the next-page token to the default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Sets the next-page token from UTF-8 bytes (validated).
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.area120.tables.v1alpha1.ListRowsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.area120.tables.v1alpha1.ListRowsResponse)
  private static final com.google.area120.tables.v1alpha1.ListRowsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.area120.tables.v1alpha1.ListRowsResponse();
  }

  public static com.google.area120.tables.v1alpha1.ListRowsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: parses via a Builder, attaching the partial message to
  // any exception so callers can inspect what was read before the failure.
  private static final com.google.protobuf.Parser<ListRowsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListRowsResponse>() {
        @java.lang.Override
        public ListRowsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListRowsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListRowsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.area120.tables.v1alpha1.ListRowsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
35,081
java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/CreateDatabaseRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1beta/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1beta; /** * * * <pre> * Message for CreateDatabase request. * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.CreateDatabaseRequest} */ public final class CreateDatabaseRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.CreateDatabaseRequest) CreateDatabaseRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateDatabaseRequest.newBuilder() to construct. 
private CreateDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateDatabaseRequest() { parent_ = ""; databaseId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateDatabaseRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_CreateDatabaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_CreateDatabaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.CreateDatabaseRequest.class, com.google.cloud.alloydb.v1beta.CreateDatabaseRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Value for parent. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Value for parent. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATABASE_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object databaseId_ = ""; /** * * * <pre> * Required. ID of the requesting object. * </pre> * * <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The databaseId. */ @java.lang.Override public java.lang.String getDatabaseId() { java.lang.Object ref = databaseId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); databaseId_ = s; return s; } } /** * * * <pre> * Required. ID of the requesting object. * </pre> * * <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for databaseId. */ @java.lang.Override public com.google.protobuf.ByteString getDatabaseIdBytes() { java.lang.Object ref = databaseId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); databaseId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATABASE_FIELD_NUMBER = 3; private com.google.cloud.alloydb.v1beta.Database database_; /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the database field is set. */ @java.lang.Override public boolean hasDatabase() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The resource being created. 
* </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The database. */ @java.lang.Override public com.google.cloud.alloydb.v1beta.Database getDatabase() { return database_ == null ? com.google.cloud.alloydb.v1beta.Database.getDefaultInstance() : database_; } /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.alloydb.v1beta.DatabaseOrBuilder getDatabaseOrBuilder() { return database_ == null ? com.google.cloud.alloydb.v1beta.Database.getDefaultInstance() : database_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, databaseId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getDatabase()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, databaseId_); } if (((bitField0_ & 0x00000001) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDatabase()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1beta.CreateDatabaseRequest)) { return super.equals(obj); } com.google.cloud.alloydb.v1beta.CreateDatabaseRequest other = (com.google.cloud.alloydb.v1beta.CreateDatabaseRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getDatabaseId().equals(other.getDatabaseId())) return false; if (hasDatabase() != other.hasDatabase()) return false; if (hasDatabase()) { if (!getDatabase().equals(other.getDatabase())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + DATABASE_ID_FIELD_NUMBER; hash = (53 * hash) + getDatabaseId().hashCode(); if (hasDatabase()) { hash = (37 * hash) + DATABASE_FIELD_NUMBER; hash = (53 * hash) + getDatabase().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( com.google.protobuf.ByteString data) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.alloydb.v1beta.CreateDatabaseRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for CreateDatabase request. 
* </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.CreateDatabaseRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.CreateDatabaseRequest) com.google.cloud.alloydb.v1beta.CreateDatabaseRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_CreateDatabaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_CreateDatabaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.CreateDatabaseRequest.class, com.google.cloud.alloydb.v1beta.CreateDatabaseRequest.Builder.class); } // Construct using com.google.cloud.alloydb.v1beta.CreateDatabaseRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDatabaseFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; databaseId_ = ""; database_ = null; if (databaseBuilder_ != null) { databaseBuilder_.dispose(); databaseBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_CreateDatabaseRequest_descriptor; } @java.lang.Override public com.google.cloud.alloydb.v1beta.CreateDatabaseRequest getDefaultInstanceForType() { return 
com.google.cloud.alloydb.v1beta.CreateDatabaseRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1beta.CreateDatabaseRequest build() { com.google.cloud.alloydb.v1beta.CreateDatabaseRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1beta.CreateDatabaseRequest buildPartial() { com.google.cloud.alloydb.v1beta.CreateDatabaseRequest result = new com.google.cloud.alloydb.v1beta.CreateDatabaseRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.alloydb.v1beta.CreateDatabaseRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.databaseId_ = databaseId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.database_ = databaseBuilder_ == null ? 
database_ : databaseBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1beta.CreateDatabaseRequest) { return mergeFrom((com.google.cloud.alloydb.v1beta.CreateDatabaseRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1beta.CreateDatabaseRequest other) { if (other == com.google.cloud.alloydb.v1beta.CreateDatabaseRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getDatabaseId().isEmpty()) { databaseId_ = other.databaseId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasDatabase()) { mergeDatabase(other.getDatabase()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { databaseId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getDatabaseFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Value for parent. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Value for parent. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Value for parent. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Value for parent. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Value for parent. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object databaseId_ = ""; /** * * * <pre> * Required. ID of the requesting object. * </pre> * * <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The databaseId. 
*/ public java.lang.String getDatabaseId() { java.lang.Object ref = databaseId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); databaseId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. ID of the requesting object. * </pre> * * <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for databaseId. */ public com.google.protobuf.ByteString getDatabaseIdBytes() { java.lang.Object ref = databaseId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); databaseId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. ID of the requesting object. * </pre> * * <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The databaseId to set. * @return This builder for chaining. */ public Builder setDatabaseId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } databaseId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. ID of the requesting object. * </pre> * * <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearDatabaseId() { databaseId_ = getDefaultInstance().getDatabaseId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. ID of the requesting object. * </pre> * * <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for databaseId to set. * @return This builder for chaining. 
*/ public Builder setDatabaseIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); databaseId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.alloydb.v1beta.Database database_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.alloydb.v1beta.Database, com.google.cloud.alloydb.v1beta.Database.Builder, com.google.cloud.alloydb.v1beta.DatabaseOrBuilder> databaseBuilder_; /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the database field is set. */ public boolean hasDatabase() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The database. */ public com.google.cloud.alloydb.v1beta.Database getDatabase() { if (databaseBuilder_ == null) { return database_ == null ? com.google.cloud.alloydb.v1beta.Database.getDefaultInstance() : database_; } else { return databaseBuilder_.getMessage(); } } /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDatabase(com.google.cloud.alloydb.v1beta.Database value) { if (databaseBuilder_ == null) { if (value == null) { throw new NullPointerException(); } database_ = value; } else { databaseBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The resource being created. 
* </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDatabase(com.google.cloud.alloydb.v1beta.Database.Builder builderForValue) { if (databaseBuilder_ == null) { database_ = builderForValue.build(); } else { databaseBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeDatabase(com.google.cloud.alloydb.v1beta.Database value) { if (databaseBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && database_ != null && database_ != com.google.cloud.alloydb.v1beta.Database.getDefaultInstance()) { getDatabaseBuilder().mergeFrom(value); } else { database_ = value; } } else { databaseBuilder_.mergeFrom(value); } if (database_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearDatabase() { bitField0_ = (bitField0_ & ~0x00000004); database_ = null; if (databaseBuilder_ != null) { databaseBuilder_.dispose(); databaseBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.alloydb.v1beta.Database.Builder getDatabaseBuilder() { bitField0_ |= 0x00000004; onChanged(); return getDatabaseFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The resource being created. 
* </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.alloydb.v1beta.DatabaseOrBuilder getDatabaseOrBuilder() { if (databaseBuilder_ != null) { return databaseBuilder_.getMessageOrBuilder(); } else { return database_ == null ? com.google.cloud.alloydb.v1beta.Database.getDefaultInstance() : database_; } } /** * * * <pre> * Required. The resource being created. * </pre> * * <code> * .google.cloud.alloydb.v1beta.Database database = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.alloydb.v1beta.Database, com.google.cloud.alloydb.v1beta.Database.Builder, com.google.cloud.alloydb.v1beta.DatabaseOrBuilder> getDatabaseFieldBuilder() { if (databaseBuilder_ == null) { databaseBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.alloydb.v1beta.Database, com.google.cloud.alloydb.v1beta.Database.Builder, com.google.cloud.alloydb.v1beta.DatabaseOrBuilder>( getDatabase(), getParentForChildren(), isClean()); database_ = null; } return databaseBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.CreateDatabaseRequest) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.CreateDatabaseRequest) private static final com.google.cloud.alloydb.v1beta.CreateDatabaseRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.CreateDatabaseRequest(); } public static com.google.cloud.alloydb.v1beta.CreateDatabaseRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<CreateDatabaseRequest> PARSER = new com.google.protobuf.AbstractParser<CreateDatabaseRequest>() { @java.lang.Override public CreateDatabaseRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateDatabaseRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateDatabaseRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1beta.CreateDatabaseRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/commons-imaging
35,650
src/main/java/org/apache/commons/imaging/formats/tiff/constants/ExifTagConstants.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.imaging.formats.tiff.constants; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfo; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoAscii; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoByte; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoBytes; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoDirectory; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoDouble; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoGpsText; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoLong; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoLongs; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoRational; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoRationals; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSLong; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSRational; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoShort; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoShortOrLong; import 
org.apache.commons.imaging.formats.tiff.taginfos.TagInfoShorts;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoUndefined;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoUndefineds;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoUnknowns;

/**
 * Tag constants for Exif metadata stored in TIFF directories, together with
 * symbolic value constants for several enumerated tags.
 *
 * <p>References:</p>
 * <ul>
 * <li>https://exiftool.org/TagNames/EXIF.html</li>
 * <li>http://tiki-lounge.com/~raf/tiff/fields.html (link appears to be dead)</li>
 * <li>https://www.awaresystems.be/imaging/tiff/tifftags.html</li>
 * <li>"Stonits": http://www.anyhere.com/gward/pixformat/tiffluv.html</li>
 * </ul>
 */
public final class ExifTagConstants {

    public static final TagInfoAscii EXIF_TAG_INTEROPERABILITY_INDEX =
            new TagInfoAscii("InteroperabilityIndex", 0x0001, -1, TiffDirectoryType.EXIF_DIRECTORY_INTEROP_IFD);
    public static final TagInfoUndefined EXIF_TAG_INTEROPERABILITY_VERSION =
            new TagInfoUndefined("InteroperabilityVersion", 0x0002, TiffDirectoryType.EXIF_DIRECTORY_INTEROP_IFD);
    public static final TagInfoAscii EXIF_TAG_PROCESSING_SOFTWARE =
            new TagInfoAscii("ProcessingSoftware", 0x000b, -1, TiffDirectoryType.EXIF_DIRECTORY_IFD0);
    public static final TagInfoAscii EXIF_TAG_SOFTWARE =
            new TagInfoAscii("Software", 0x0131, -1, TiffDirectoryType.EXIF_DIRECTORY_IFD0);

    // Preview/thumbnail image offsets and lengths. The same tag id is reused in
    // different directories with different meanings; the trailing "true" marks
    // tags whose value is an offset into the file.
    public static final TagInfoLong EXIF_TAG_PREVIEW_IMAGE_START_IFD0 =
            new TagInfoLong("PreviewImageStart", 0x0111, TiffDirectoryType.EXIF_DIRECTORY_IFD0, true);
    public static final TagInfoLong EXIF_TAG_PREVIEW_IMAGE_START_SUB_IFD1 =
            new TagInfoLong("PreviewImageStart", 0x0111, TiffDirectoryType.EXIF_DIRECTORY_SUB_IFD1, true);
    public static final TagInfoLong EXIF_TAG_JPG_FROM_RAW_START_SUB_IFD2 =
            new TagInfoLong("JpgFromRawStart", 0x0111, TiffDirectoryType.EXIF_DIRECTORY_SUB_IFD2, true);
    public static final TagInfoLong EXIF_TAG_PREVIEW_IMAGE_LENGTH_IFD0 =
            new TagInfoLong("PreviewImageLength", 0x0117, TiffDirectoryType.EXIF_DIRECTORY_IFD0);
    public static final TagInfoLong EXIF_TAG_PREVIEW_IMAGE_LENGTH_SUB_IFD1 =
            new TagInfoLong("PreviewImageLength", 0x0117, TiffDirectoryType.EXIF_DIRECTORY_SUB_IFD1);
    public static final TagInfoLong EXIF_TAG_JPG_FROM_RAW_LENGTH_SUB_IFD2 =
            new TagInfoLong("JpgFromRawLength", 0x0117, TiffDirectoryType.EXIF_DIRECTORY_SUB_IFD2);
    public static final TagInfoLong EXIF_TAG_PREVIEW_IMAGE_START_MAKER_NOTES =
            new TagInfoLong("PreviewImageStart", 0x0201, TiffDirectoryType.EXIF_DIRECTORY_MAKER_NOTES);
    public static final TagInfoLong EXIF_TAG_JPG_FROM_RAW_START_SUB_IFD =
            new TagInfoLong("JpgFromRawStart", 0x0201, TiffDirectoryType.EXIF_DIRECTORY_SUB_IFD, true);
    public static final TagInfoLong EXIF_TAG_JPG_FROM_RAW_START_IFD2 =
            new TagInfoLong("JpgFromRawStart", 0x0201, TiffDirectoryType.EXIF_DIRECTORY_IFD2, true);
    public static final TagInfoLong EXIF_TAG_OTHER_IMAGE_START =
            new TagInfoLong("OtherImageStart", 0x0201, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN, true);
    public static final TagInfoLong EXIF_TAG_PREVIEW_IMAGE_LENGTH_MAKER_NOTES =
            new TagInfoLong("PreviewImageLength", 0x0202, TiffDirectoryType.EXIF_DIRECTORY_MAKER_NOTES);
    public static final TagInfoLong EXIF_TAG_JPG_FROM_RAW_LENGTH_SUB_IFD =
            new TagInfoLong("JpgFromRawLength", 0x0202, TiffDirectoryType.EXIF_DIRECTORY_SUB_IFD);
    public static final TagInfoLong EXIF_TAG_JPG_FROM_RAW_LENGTH_IFD2 =
            new TagInfoLong("JpgFromRawLength", 0x0202, TiffDirectoryType.EXIF_DIRECTORY_IFD2);
    public static final TagInfoLong EXIF_TAG_OTHER_IMAGE_LENGTH =
            new TagInfoLong("OtherImageLength", 0x0202, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);

    public static final TagInfoBytes EXIF_TAG_APPLICATION_NOTES =
            new TagInfoBytes("ApplicationNotes", 0x02bc, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_MATTEING =
            new TagInfoUnknowns("Matteing", 0x80e3, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_DATA_TYPE =
            new TagInfoUnknowns("DataType", 0x80e4, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_IMAGE_DEPTH =
            new TagInfoUnknowns("ImageDepth", 0x80e5, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_TILE_DEPTH =
            new TagInfoUnknowns("TileDepth", 0x80e6, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_MODEL_2 =
            new TagInfoUnknowns("Model2", 0x827d, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoRationals EXIF_TAG_EXPOSURE_TIME =
            new TagInfoRationals("ExposureTime", 0x829a, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRationals EXIF_TAG_FNUMBER =
            new TagInfoRationals("FNumber", 0x829d, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    // FIXME: other types?
    public static final TagInfoLong EXIF_TAG_IPTC_NAA =
            new TagInfoLong("IPTC-NAA", 0x83bb, TiffDirectoryType.EXIF_DIRECTORY_IFD0);
    public static final TagInfoShorts EXIF_TAG_INTERGRAPH_PACKET_DATA =
            new TagInfoShorts("IntergraphPacketData", 0x847e, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoLongs EXIF_TAG_INTERGRAPH_FLAG_REGISTERS =
            new TagInfoLongs("IntergraphFlagRegisters", 0x847f, 16, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoAscii EXIF_TAG_SITE =
            new TagInfoAscii("Site", 0x84e0, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoAscii EXIF_TAG_COLOR_SEQUENCE =
            new TagInfoAscii("ColorSequence", 0x84e1, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoAscii EXIF_TAG_IT8HEADER =
            new TagInfoAscii("IT8Header", 0x84e2, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoShort EXIF_TAG_RASTER_PADDING =
            new TagInfoShort("RasterPadding", 0x84e3, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoShort EXIF_TAG_BITS_PER_RUN_LENGTH =
            new TagInfoShort("BitsPerRunLength", 0x84e4, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoShort EXIF_TAG_BITS_PER_EXTENDED_RUN_LENGTH =
            new TagInfoShort("BitsPerExtendedRunLength", 0x84e5, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoBytes EXIF_TAG_COLOR_TABLE =
            new TagInfoBytes("ColorTable", 0x84e6, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoByte EXIF_TAG_IMAGE_COLOR_INDICATOR =
            new TagInfoByte("ImageColorIndicator", 0x84e7, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoByte EXIF_TAG_BACKGROUND_COLOR_INDICATOR =
            new TagInfoByte("BackgroundColorIndicator", 0x84e8, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoBytes EXIF_TAG_IMAGE_COLOR_VALUE =
            new TagInfoBytes("ImageColorValue", 0x84e9, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoBytes EXIF_TAG_BACKGROUND_COLOR_VALUE =
            new TagInfoBytes("BackgroundColorValue", 0x84ea, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoBytes EXIF_TAG_PIXEL_INTENSITY_RANGE =
            new TagInfoBytes("PixelIntensityRange", 0x84eb, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoByte EXIF_TAG_TRANSPARENCY_INDICATOR =
            new TagInfoByte("TransparencyIndicator", 0x84ec, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoAscii EXIF_TAG_COLOR_CHARACTERIZATION =
            new TagInfoAscii("ColorCharacterization", 0x84ed, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoShortOrLong EXIF_TAG_HCUSAGE =
            new TagInfoShortOrLong("HCUsage", 0x84ee, 1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoAscii EXIF_TAG_SEMINFO =
            new TagInfoAscii("SEMInfo", 0x8546, 1, TiffDirectoryType.EXIF_DIRECTORY_IFD0);
    public static final TagInfoLong EXIF_TAG_AFCP_IPTC =
            new TagInfoLong("AFCP_IPTC", 0x8568, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoLong EXIF_TAG_LEAF_DATA =
            new TagInfoLong("LeafData", 0x8606, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoBytes EXIF_TAG_PHOTOSHOP_SETTINGS =
            new TagInfoBytes("PhotoshopSettings", 0x8649, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoDirectory EXIF_TAG_EXIF_OFFSET =
            new TagInfoDirectory("ExifOffset", 0x8769, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);

    public static final TagInfoShort EXIF_TAG_EXPOSURE_PROGRAM =
            new TagInfoShort("ExposureProgram", 0x8822, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int EXPOSURE_PROGRAM_VALUE_MANUAL = 1;
    public static final int EXPOSURE_PROGRAM_VALUE_PROGRAM_AE = 2;
    public static final int EXPOSURE_PROGRAM_VALUE_APERTURE_PRIORITY_AE = 3;
    public static final int EXPOSURE_PROGRAM_VALUE_SHUTTER_SPEED_PRIORITY_AE = 4;
    public static final int EXPOSURE_PROGRAM_VALUE_CREATIVE_SLOW_SPEED = 5;
    public static final int EXPOSURE_PROGRAM_VALUE_ACTION_HIGH_SPEED = 6;
    public static final int EXPOSURE_PROGRAM_VALUE_PORTRAIT = 7;
    public static final int EXPOSURE_PROGRAM_VALUE_LANDSCAPE = 8;

    public static final TagInfoAscii EXIF_TAG_SPECTRAL_SENSITIVITY =
            new TagInfoAscii("SpectralSensitivity", 0x8824, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoDirectory EXIF_TAG_GPSINFO =
            new TagInfoDirectory("GPSInfo", 0x8825, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoShorts EXIF_TAG_ISO =
            new TagInfoShorts("PhotographicSensitivity", 0x8827, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoUndefineds EXIF_TAG_OPTO_ELECTRIC_CONV_FACTOR =
            new TagInfoUndefineds("Opto - Electric Conv Factor", 0x8828, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoLong EXIF_TAG_LEAF_SUB_IFD =
            new TagInfoLong("LeafSubIFD", 0x888a, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUndefineds EXIF_TAG_EXIF_VERSION =
            new TagInfoUndefineds("ExifVersion", 0x9000, 4, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_DATE_TIME_ORIGINAL =
            new TagInfoAscii("DateTimeOriginal", 0x9003, 20, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_DATE_TIME_DIGITIZED =
            new TagInfoAscii("DateTimeDigitized", 0x9004, 20, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoUndefineds EXIF_TAG_COMPONENTS_CONFIGURATION =
            new TagInfoUndefineds("ComponentsConfiguration", 0x9101, 4, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRational EXIF_TAG_COMPRESSED_BITS_PER_PIXEL =
            new TagInfoRational("CompressedBitsPerPixel", 0x9102, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoSRational EXIF_TAG_SHUTTER_SPEED_VALUE =
            new TagInfoSRational("ShutterSpeedValue", 0x9201, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRational EXIF_TAG_APERTURE_VALUE =
            new TagInfoRational("ApertureValue", 0x9202, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoSRational EXIF_TAG_BRIGHTNESS_VALUE =
            new TagInfoSRational("BrightnessValue", 0x9203, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoSRational EXIF_TAG_EXPOSURE_COMPENSATION =
            new TagInfoSRational("ExposureCompensation", 0x9204, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRational EXIF_TAG_MAX_APERTURE_VALUE =
            new TagInfoRational("MaxApertureValue", 0x9205, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRationals EXIF_TAG_SUBJECT_DISTANCE =
            new TagInfoRationals("Subject Distance", 0x9206, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);

    public static final TagInfoShort EXIF_TAG_METERING_MODE =
            new TagInfoShort("MeteringMode", 0x9207, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int METERING_MODE_VALUE_AVERAGE = 1;
    public static final int METERING_MODE_VALUE_CENTER_WEIGHTED_AVERAGE = 2;
    public static final int METERING_MODE_VALUE_SPOT = 3;
    public static final int METERING_MODE_VALUE_MULTI_SPOT = 4;
    public static final int METERING_MODE_VALUE_MULTI_SEGMENT = 5;
    public static final int METERING_MODE_VALUE_PARTIAL = 6;
    public static final int METERING_MODE_VALUE_OTHER = 255;

    public static final TagInfoShort EXIF_TAG_LIGHT_SOURCE =
            new TagInfoShort("LightSource", 0x9208, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int LIGHT_SOURCE_VALUE_DAYLIGHT = 1;
    public static final int LIGHT_SOURCE_VALUE_FLUORESCENT = 2;
    public static final int LIGHT_SOURCE_VALUE_TUNGSTEN = 3;
    public static final int LIGHT_SOURCE_VALUE_FLASH = 4;
    public static final int LIGHT_SOURCE_VALUE_FINE_WEATHER = 9;
    public static final int LIGHT_SOURCE_VALUE_CLOUDY = 10;
    public static final int LIGHT_SOURCE_VALUE_SHADE = 11;
    public static final int LIGHT_SOURCE_VALUE_DAYLIGHT_FLUORESCENT = 12;
    public static final int LIGHT_SOURCE_VALUE_DAY_WHITE_FLUORESCENT = 13;
    public static final int LIGHT_SOURCE_VALUE_COOL_WHITE_FLUORESCENT = 14;
    public static final int LIGHT_SOURCE_VALUE_WHITE_FLUORESCENT = 15;
    public static final int LIGHT_SOURCE_VALUE_STANDARD_LIGHT_A = 17;
    public static final int LIGHT_SOURCE_VALUE_STANDARD_LIGHT_B = 18;
    public static final int LIGHT_SOURCE_VALUE_STANDARD_LIGHT_C = 19;
    public static final int LIGHT_SOURCE_VALUE_D55 = 20;
    public static final int LIGHT_SOURCE_VALUE_D65 = 21;
    public static final int LIGHT_SOURCE_VALUE_D75 = 22;
    public static final int LIGHT_SOURCE_VALUE_D50 = 23;
    public static final int LIGHT_SOURCE_VALUE_ISO_STUDIO_TUNGSTEN = 24;
    public static final int LIGHT_SOURCE_VALUE_OTHER = 255;

    // Flash values are bit fields: fired, return-light detection, mode,
    // function presence and red-eye reduction combined into one short.
    public static final TagInfoShort EXIF_TAG_FLASH =
            new TagInfoShort("Flash", 0x9209, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int FLASH_VALUE_NO_FLASH = 0x0;
    public static final int FLASH_VALUE_FIRED = 0x1;
    public static final int FLASH_VALUE_FIRED_RETURN_NOT_DETECTED = 0x5;
    public static final int FLASH_VALUE_FIRED_RETURN_DETECTED = 0x7;
    public static final int FLASH_VALUE_ON_DID_NOT_FIRE = 0x8;
    public static final int FLASH_VALUE_ON = 0x9;
    public static final int FLASH_VALUE_ON_RETURN_NOT_DETECTED = 0xd;
    public static final int FLASH_VALUE_ON_RETURN_DETECTED = 0xf;
    public static final int FLASH_VALUE_OFF = 0x10;
    public static final int FLASH_VALUE_OFF_DID_NOT_FIRE_RETURN_NOT_DETECTED = 0x14;
    public static final int FLASH_VALUE_AUTO_DID_NOT_FIRE = 0x18;
    public static final int FLASH_VALUE_AUTO_FIRED = 0x19;
    public static final int FLASH_VALUE_AUTO_FIRED_RETURN_NOT_DETECTED = 0x1d;
    public static final int FLASH_VALUE_AUTO_FIRED_RETURN_DETECTED = 0x1f;
    public static final int FLASH_VALUE_NO_FLASH_FUNCTION = 0x20;
    public static final int FLASH_VALUE_OFF_NO_FLASH_FUNCTION = 0x30;
    public static final int FLASH_VALUE_FIRED_RED_EYE_REDUCTION = 0x41;
    public static final int FLASH_VALUE_FIRED_RED_EYE_REDUCTION_RETURN_NOT_DETECTED = 0x45;
    public static final int FLASH_VALUE_FIRED_RED_EYE_REDUCTION_RETURN_DETECTED = 0x47;
    public static final int FLASH_VALUE_ON_RED_EYE_REDUCTION = 0x49;
    public static final int FLASH_VALUE_ON_RED_EYE_REDUCTION_RETURN_NOT_DETECTED = 0x4d;
    public static final int FLASH_VALUE_ON_RED_EYE_REDUCTION_RETURN_DETECTED = 0x4f;
    public static final int FLASH_VALUE_OFF_RED_EYE_REDUCTION = 0x50;
    public static final int FLASH_VALUE_AUTO_DID_NOT_FIRE_RED_EYE_REDUCTION = 0x58;
    public static final int FLASH_VALUE_AUTO_FIRED_RED_EYE_REDUCTION = 0x59;
    public static final int FLASH_VALUE_AUTO_FIRED_RED_EYE_REDUCTION_RETURN_NOT_DETECTED = 0x5d;
    public static final int FLASH_VALUE_AUTO_FIRED_RED_EYE_REDUCTION_RETURN_DETECTED = 0x5f;

    public static final TagInfoRationals EXIF_TAG_FOCAL_LENGTH =
            new TagInfoRationals("FocalLength", 0x920a, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoShorts EXIF_TAG_SUBJECT_AREA =
            new TagInfoShorts("SubjectArea", 0x9214, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoDouble EXIF_TAG_STO_NITS =
            new TagInfoDouble("StoNits", 0x923f, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUndefineds EXIF_TAG_MAKER_NOTE =
            new TagInfoUndefineds("MakerNote", 0x927c, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoGpsText EXIF_TAG_USER_COMMENT =
            new TagInfoGpsText("UserComment", 0x9286, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SUB_SEC_TIME =
            new TagInfoAscii("SubSecTime", 0x9290, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SUB_SEC_TIME_ORIGINAL =
            new TagInfoAscii("SubSecTimeOriginal", 0x9291, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SUB_SEC_TIME_DIGITIZED =
            new TagInfoAscii("SubSecTimeDigitized", 0x9292, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoUndefineds EXIF_TAG_FLASHPIX_VERSION =
            new TagInfoUndefineds("FlashpixVersion", 0xa000, 4, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoShort EXIF_TAG_EXIF_IMAGE_WIDTH =
            new TagInfoShort("ExifImageWidth", 0xa002, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoShort EXIF_TAG_EXIF_IMAGE_LENGTH =
            new TagInfoShort("ExifImageLength", 0xa003, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_RELATED_SOUND_FILE =
            new TagInfoAscii("RelatedSoundFile", 0xa004, 13, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoDirectory EXIF_TAG_INTEROP_OFFSET =
            new TagInfoDirectory("InteropOffset", 0xa005, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoRationals EXIF_TAG_FLASH_ENERGY_EXIF_IFD =
            new TagInfoRationals("FlashEnergy", 0xa20b, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoUndefineds EXIF_TAG_SPATIAL_FREQUENCY_RESPONSE_2 =
            new TagInfoUndefineds("SpatialFrequencyResponse", 0xa20c, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_NOISE_2 =
            new TagInfoUnknowns("Noise", 0xa20d, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoRational EXIF_TAG_FOCAL_PLANE_XRESOLUTION_EXIF_IFD =
            new TagInfoRational("FocalPlaneXResolution", 0xa20e, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRational EXIF_TAG_FOCAL_PLANE_YRESOLUTION_EXIF_IFD =
            new TagInfoRational("FocalPlaneYResolution", 0xa20f, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);

    public static final TagInfoShort EXIF_TAG_FOCAL_PLANE_RESOLUTION_UNIT_EXIF_IFD =
            new TagInfoShort("FocalPlaneResolutionUnit", 0xa210, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int FOCAL_PLANE_RESOLUTION_UNIT_EXIF_IFD_VALUE_NONE = 1;
    public static final int FOCAL_PLANE_RESOLUTION_UNIT_EXIF_IFD_VALUE_INCHES = 2;
    public static final int FOCAL_PLANE_RESOLUTION_UNIT_EXIF_IFD_VALUE_CM = 3;
    public static final int FOCAL_PLANE_RESOLUTION_UNIT_EXIF_IFD_VALUE_MM = 4;
    public static final int FOCAL_PLANE_RESOLUTION_UNIT_EXIF_IFD_VALUE_UM = 5;

    public static final TagInfoUnknowns EXIF_TAG_IMAGE_NUMBER =
            new TagInfoUnknowns("ImageNumber", 0xa211, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_SECURITY_CLASSIFICATION =
            new TagInfoUnknowns("SecurityClassification", 0xa212, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUnknowns EXIF_TAG_IMAGE_HISTORY =
            new TagInfoUnknowns("ImageHistory", 0xa213, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoShorts EXIF_TAG_SUBJECT_LOCATION =
            new TagInfoShorts("SubjectLocation", 0xa214, 2, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRational EXIF_TAG_EXPOSURE_INDEX_EXIF_IFD =
            new TagInfoRational("ExposureIndex", 0xa215, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoUnknowns EXIF_TAG_TIFF_EPSTANDARD_ID_2 =
            new TagInfoUnknowns("TIFF-EPStandardID", 0xa216, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);

    public static final TagInfoShort EXIF_TAG_SENSING_METHOD_EXIF_IFD =
            new TagInfoShort("SensingMethod", 0xa217, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int SENSING_METHOD_EXIF_IFD_VALUE_NOT_DEFINED = 1;
    public static final int SENSING_METHOD_EXIF_IFD_VALUE_ONE_CHIP_COLOR_AREA = 2;
    public static final int SENSING_METHOD_EXIF_IFD_VALUE_TWO_CHIP_COLOR_AREA = 3;
    public static final int SENSING_METHOD_EXIF_IFD_VALUE_THREE_CHIP_COLOR_AREA = 4;
    public static final int SENSING_METHOD_EXIF_IFD_VALUE_COLOR_SEQUENTIAL_AREA = 5;
    public static final int SENSING_METHOD_EXIF_IFD_VALUE_TRILINEAR = 7;
    public static final int SENSING_METHOD_EXIF_IFD_VALUE_COLOR_SEQUENTIAL_LINEAR = 8;

    public static final TagInfoUndefined EXIF_TAG_FILE_SOURCE =
            new TagInfoUndefined("FileSource", 0xa300, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int FILE_SOURCE_VALUE_FILM_SCANNER = 1;
    public static final int FILE_SOURCE_VALUE_REFLECTION_PRINT_SCANNER = 2;
    public static final int FILE_SOURCE_VALUE_DIGITAL_CAMERA = 3;

    public static final TagInfoUndefined EXIF_TAG_SCENE_TYPE =
            new TagInfoUndefined("SceneType", 0xa301, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoUndefineds EXIF_TAG_CFAPATTERN =
            new TagInfoUndefineds("CFAPattern", 0xa302, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);

    public static final TagInfoShort EXIF_TAG_CUSTOM_RENDERED =
            new TagInfoShort("CustomRendered", 0xa401, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int CUSTOM_RENDERED_VALUE_NORMAL = 0;
    public static final int CUSTOM_RENDERED_VALUE_CUSTOM = 1;

    public static final TagInfoShort EXIF_TAG_EXPOSURE_MODE =
            new TagInfoShort("ExposureMode", 0xa402, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int EXPOSURE_MODE_VALUE_AUTO = 0;
    public static final int EXPOSURE_MODE_VALUE_MANUAL = 1;
    public static final int EXPOSURE_MODE_VALUE_AUTO_BRACKET = 2;

    public static final TagInfoShort EXIF_TAG_WHITE_BALANCE_1 =
            new TagInfoShort("WhiteBalance", 0xa403, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int WHITE_BALANCE_1_VALUE_AUTO = 0;
    public static final int WHITE_BALANCE_1_VALUE_MANUAL = 1;

    public static final TagInfoRational EXIF_TAG_DIGITAL_ZOOM_RATIO =
            new TagInfoRational("DigitalZoomRatio", 0xa404, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoShort EXIF_TAG_FOCAL_LENGTH_IN_35MM_FORMAT =
            new TagInfoShort("FocalLengthIn35mmFormat", 0xa405, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);

    public static final TagInfoShort EXIF_TAG_SCENE_CAPTURE_TYPE =
            new TagInfoShort("SceneCaptureType", 0xa406, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int SCENE_CAPTURE_TYPE_VALUE_STANDARD = 0;
    public static final int SCENE_CAPTURE_TYPE_VALUE_LANDSCAPE = 1;
    public static final int SCENE_CAPTURE_TYPE_VALUE_PORTRAIT = 2;
    public static final int SCENE_CAPTURE_TYPE_VALUE_NIGHT = 3;

    public static final TagInfoShort EXIF_TAG_GAIN_CONTROL =
            new TagInfoShort("GainControl", 0xa407, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int GAIN_CONTROL_VALUE_NONE = 0;
    public static final int GAIN_CONTROL_VALUE_LOW_GAIN_UP = 1;
    public static final int GAIN_CONTROL_VALUE_HIGH_GAIN_UP = 2;
    public static final int GAIN_CONTROL_VALUE_LOW_GAIN_DOWN = 3;
    public static final int GAIN_CONTROL_VALUE_HIGH_GAIN_DOWN = 4;

    public static final TagInfoShort EXIF_TAG_CONTRAST_1 =
            new TagInfoShort("Contrast", 0xa408, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int CONTRAST_1_VALUE_NORMAL = 0;
    public static final int CONTRAST_1_VALUE_LOW = 1;
    public static final int CONTRAST_1_VALUE_HIGH = 2;

    public static final TagInfoShort EXIF_TAG_SATURATION_1 =
            new TagInfoShort("Saturation", 0xa409, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int SATURATION_1_VALUE_NORMAL = 0;
    public static final int SATURATION_1_VALUE_LOW = 1;
    public static final int SATURATION_1_VALUE_HIGH = 2;

    public static final TagInfoShort EXIF_TAG_SHARPNESS_1 =
            new TagInfoShort("Sharpness", 0xa40a, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int SHARPNESS_1_VALUE_NORMAL = 0;
    public static final int SHARPNESS_1_VALUE_SOFT = 1;
    public static final int SHARPNESS_1_VALUE_HARD = 2;

    public static final TagInfoUndefineds EXIF_TAG_DEVICE_SETTING_DESCRIPTION =
            new TagInfoUndefineds("DeviceSettingDescription", 0xa40b, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);

    public static final TagInfoShort EXIF_TAG_SUBJECT_DISTANCE_RANGE =
            new TagInfoShort("SubjectDistanceRange", 0xa40c, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final int SUBJECT_DISTANCE_RANGE_VALUE_MACRO = 1;
    public static final int SUBJECT_DISTANCE_RANGE_VALUE_CLOSE = 2;
    public static final int SUBJECT_DISTANCE_RANGE_VALUE_DISTANT = 3;

    public static final TagInfoAscii EXIF_TAG_IMAGE_UNIQUE_ID =
            new TagInfoAscii("ImageUniqueID", 0xa420, 33, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_CAMERA_OWNER_NAME =
            new TagInfoAscii("CameraOwnerName", 0xa430, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_BODY_SERIAL_NUMBER =
            new TagInfoAscii("BodySerialNumber", 0xa431, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRationals EXIF_TAG_LENS_SPECIFICATION =
            new TagInfoRationals("LensSpecification", 0xa432, 4, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_LENS_MAKE =
            new TagInfoAscii("LensMake", 0xa433, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_LENS_MODEL =
            new TagInfoAscii("LensModel", 0xa434, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_LENS_SERIAL_NUMBER =
            new TagInfoAscii("LensSerialNumber", 0xa435, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoRational EXIF_TAG_GAMMA =
            new TagInfoRational("Gamma", 0xa500, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoUnknowns EXIF_TAG_ANNOTATIONS =
            new TagInfoUnknowns("Annotations", 0xc44f, -1, TiffDirectoryType.EXIF_DIRECTORY_UNKNOWN);
    public static final TagInfoUndefined EXIF_TAG_PRINT_IM =
            new TagInfoUndefined("PrintIM", 0xc4a5, TiffDirectoryType.EXIF_DIRECTORY_IFD0);
    public static final TagInfoSLong EXIF_TAG_OFFSET_SCHEMA =
            new TagInfoSLong("OffsetSchema", 0xea1d, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_OWNER_NAME =
            new TagInfoAscii("OwnerName", 0xfde8, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SERIAL_NUMBER =
            new TagInfoAscii("SerialNumber", 0xfde9, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_LENS =
            new TagInfoAscii("Lens", 0xfdea, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_RAW_FILE =
            new TagInfoAscii("RawFile", 0xfe4c, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_CONVERTER =
            new TagInfoAscii("Converter", 0xfe4d, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_WHITE_BALANCE_2 =
            new TagInfoAscii("WhiteBalance", 0xfe4e, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_EXPOSURE =
            new TagInfoAscii("Exposure", 0xfe51, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SHADOWS =
            new TagInfoAscii("Shadows", 0xfe52, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_BRIGHTNESS =
            new TagInfoAscii("Brightness", 0xfe53, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_CONTRAST_2 =
            new TagInfoAscii("Contrast", 0xfe54, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SATURATION_2 =
            new TagInfoAscii("Saturation", 0xfe55, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SHARPNESS_2 =
            new TagInfoAscii("Sharpness", 0xfe56, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_SMOOTHNESS =
            new TagInfoAscii("Smoothness", 0xfe57, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);
    public static final TagInfoAscii EXIF_TAG_MOIRE_FILTER =
            new TagInfoAscii("MoireFilter", 0xfe58, -1, TiffDirectoryType.EXIF_DIRECTORY_EXIF_IFD);

    /**
     * An unmodifiable list of every tag declared above.
     *
     * <p>Fix: the original list contained {@code EXIF_TAG_IMAGE_UNIQUE_ID}
     * twice (once after {@code EXIF_TAG_SUBJECT_DISTANCE} and again after
     * {@code EXIF_TAG_SUBJECT_DISTANCE_RANGE}); the duplicate entry has been
     * removed so each tag appears exactly once.</p>
     */
    public static final List<TagInfo> ALL_EXIF_TAGS = Collections.unmodifiableList(Arrays.asList(
            EXIF_TAG_INTEROPERABILITY_INDEX, EXIF_TAG_INTEROPERABILITY_VERSION,
            EXIF_TAG_PROCESSING_SOFTWARE, EXIF_TAG_SOFTWARE,
            EXIF_TAG_PREVIEW_IMAGE_START_IFD0, EXIF_TAG_PREVIEW_IMAGE_START_SUB_IFD1,
            EXIF_TAG_JPG_FROM_RAW_START_SUB_IFD2, EXIF_TAG_PREVIEW_IMAGE_LENGTH_IFD0,
            EXIF_TAG_PREVIEW_IMAGE_LENGTH_SUB_IFD1, EXIF_TAG_JPG_FROM_RAW_LENGTH_SUB_IFD2,
            EXIF_TAG_PREVIEW_IMAGE_START_MAKER_NOTES, EXIF_TAG_JPG_FROM_RAW_START_SUB_IFD,
            EXIF_TAG_JPG_FROM_RAW_START_IFD2, EXIF_TAG_OTHER_IMAGE_START,
            EXIF_TAG_PREVIEW_IMAGE_LENGTH_MAKER_NOTES, EXIF_TAG_JPG_FROM_RAW_LENGTH_SUB_IFD,
            EXIF_TAG_JPG_FROM_RAW_LENGTH_IFD2, EXIF_TAG_OTHER_IMAGE_LENGTH,
            EXIF_TAG_APPLICATION_NOTES, EXIF_TAG_MATTEING, EXIF_TAG_DATA_TYPE,
            EXIF_TAG_IMAGE_DEPTH, EXIF_TAG_TILE_DEPTH, EXIF_TAG_MODEL_2,
            EXIF_TAG_EXPOSURE_TIME, EXIF_TAG_FNUMBER, EXIF_TAG_IPTC_NAA,
            EXIF_TAG_INTERGRAPH_PACKET_DATA, EXIF_TAG_INTERGRAPH_FLAG_REGISTERS,
            EXIF_TAG_SITE, EXIF_TAG_COLOR_SEQUENCE, EXIF_TAG_IT8HEADER,
            EXIF_TAG_RASTER_PADDING, EXIF_TAG_BITS_PER_RUN_LENGTH,
            EXIF_TAG_BITS_PER_EXTENDED_RUN_LENGTH, EXIF_TAG_COLOR_TABLE,
            EXIF_TAG_IMAGE_COLOR_INDICATOR, EXIF_TAG_BACKGROUND_COLOR_INDICATOR,
            EXIF_TAG_IMAGE_COLOR_VALUE, EXIF_TAG_BACKGROUND_COLOR_VALUE,
            EXIF_TAG_PIXEL_INTENSITY_RANGE, EXIF_TAG_TRANSPARENCY_INDICATOR,
            EXIF_TAG_COLOR_CHARACTERIZATION, EXIF_TAG_HCUSAGE, EXIF_TAG_SEMINFO,
            EXIF_TAG_AFCP_IPTC, EXIF_TAG_LEAF_DATA, EXIF_TAG_PHOTOSHOP_SETTINGS,
            EXIF_TAG_EXIF_OFFSET, EXIF_TAG_EXPOSURE_PROGRAM, EXIF_TAG_SPECTRAL_SENSITIVITY,
            EXIF_TAG_GPSINFO, EXIF_TAG_ISO, EXIF_TAG_OPTO_ELECTRIC_CONV_FACTOR,
            EXIF_TAG_LEAF_SUB_IFD, EXIF_TAG_EXIF_VERSION, EXIF_TAG_DATE_TIME_ORIGINAL,
            EXIF_TAG_DATE_TIME_DIGITIZED, EXIF_TAG_COMPONENTS_CONFIGURATION,
            EXIF_TAG_COMPRESSED_BITS_PER_PIXEL, EXIF_TAG_SHUTTER_SPEED_VALUE,
            EXIF_TAG_APERTURE_VALUE, EXIF_TAG_BRIGHTNESS_VALUE,
            EXIF_TAG_EXPOSURE_COMPENSATION, EXIF_TAG_MAX_APERTURE_VALUE,
            EXIF_TAG_SUBJECT_DISTANCE, EXIF_TAG_IMAGE_UNIQUE_ID,
            EXIF_TAG_CAMERA_OWNER_NAME, EXIF_TAG_BODY_SERIAL_NUMBER,
            EXIF_TAG_LENS_SPECIFICATION, EXIF_TAG_LENS_MAKE, EXIF_TAG_LENS_MODEL,
            EXIF_TAG_LENS_SERIAL_NUMBER, EXIF_TAG_METERING_MODE, EXIF_TAG_LIGHT_SOURCE,
            EXIF_TAG_FLASH, EXIF_TAG_FOCAL_LENGTH, EXIF_TAG_SUBJECT_AREA,
            EXIF_TAG_STO_NITS, EXIF_TAG_SUB_SEC_TIME, EXIF_TAG_SUB_SEC_TIME_ORIGINAL,
            EXIF_TAG_SUB_SEC_TIME_DIGITIZED, EXIF_TAG_FLASHPIX_VERSION,
            EXIF_TAG_EXIF_IMAGE_WIDTH, EXIF_TAG_EXIF_IMAGE_LENGTH,
            EXIF_TAG_RELATED_SOUND_FILE, EXIF_TAG_INTEROP_OFFSET,
            EXIF_TAG_FLASH_ENERGY_EXIF_IFD, EXIF_TAG_SPATIAL_FREQUENCY_RESPONSE_2,
            EXIF_TAG_NOISE_2, EXIF_TAG_FOCAL_PLANE_XRESOLUTION_EXIF_IFD,
            EXIF_TAG_FOCAL_PLANE_YRESOLUTION_EXIF_IFD,
            EXIF_TAG_FOCAL_PLANE_RESOLUTION_UNIT_EXIF_IFD, EXIF_TAG_IMAGE_NUMBER,
            EXIF_TAG_SECURITY_CLASSIFICATION, EXIF_TAG_IMAGE_HISTORY,
            EXIF_TAG_SUBJECT_LOCATION, EXIF_TAG_EXPOSURE_INDEX_EXIF_IFD,
            EXIF_TAG_TIFF_EPSTANDARD_ID_2, EXIF_TAG_SENSING_METHOD_EXIF_IFD,
            EXIF_TAG_FILE_SOURCE, EXIF_TAG_SCENE_TYPE, EXIF_TAG_CFAPATTERN,
            EXIF_TAG_CUSTOM_RENDERED, EXIF_TAG_EXPOSURE_MODE, EXIF_TAG_WHITE_BALANCE_1,
            EXIF_TAG_DIGITAL_ZOOM_RATIO, EXIF_TAG_FOCAL_LENGTH_IN_35MM_FORMAT,
            EXIF_TAG_SCENE_CAPTURE_TYPE, EXIF_TAG_GAIN_CONTROL, EXIF_TAG_CONTRAST_1,
            EXIF_TAG_SATURATION_1, EXIF_TAG_SHARPNESS_1,
            EXIF_TAG_DEVICE_SETTING_DESCRIPTION, EXIF_TAG_SUBJECT_DISTANCE_RANGE,
            EXIF_TAG_GAMMA, EXIF_TAG_ANNOTATIONS, EXIF_TAG_PRINT_IM,
            EXIF_TAG_OFFSET_SCHEMA, EXIF_TAG_OWNER_NAME, EXIF_TAG_SERIAL_NUMBER,
            EXIF_TAG_LENS, EXIF_TAG_RAW_FILE, EXIF_TAG_CONVERTER,
            EXIF_TAG_WHITE_BALANCE_2, EXIF_TAG_EXPOSURE, EXIF_TAG_SHADOWS,
            EXIF_TAG_BRIGHTNESS, EXIF_TAG_CONTRAST_2, EXIF_TAG_SATURATION_2,
            EXIF_TAG_SHARPNESS_2, EXIF_TAG_SMOOTHNESS, EXIF_TAG_MOIRE_FILTER,
            EXIF_TAG_USER_COMMENT, EXIF_TAG_MAKER_NOTE));

    /** Utility class: not meant to be instantiated. */
    private ExifTagConstants() {
    }
}
googleapis/sdk-platform-java
35,458
java-showcase/gapic-showcase/src/main/java/com/google/showcase/v1beta1/stub/GrpcMessagingStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.showcase.v1beta1.stub; import static com.google.showcase.v1beta1.MessagingClient.ListBlurbsPagedResponse; import static com.google.showcase.v1beta1.MessagingClient.ListLocationsPagedResponse; import static com.google.showcase.v1beta1.MessagingClient.ListRoomsPagedResponse; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientStreamingCallable; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.Operation; import 
com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import com.google.showcase.v1beta1.Blurb; import com.google.showcase.v1beta1.ConnectRequest; import com.google.showcase.v1beta1.CreateBlurbRequest; import com.google.showcase.v1beta1.CreateRoomRequest; import com.google.showcase.v1beta1.DeleteBlurbRequest; import com.google.showcase.v1beta1.DeleteRoomRequest; import com.google.showcase.v1beta1.GetBlurbRequest; import com.google.showcase.v1beta1.GetRoomRequest; import com.google.showcase.v1beta1.ListBlurbsRequest; import com.google.showcase.v1beta1.ListBlurbsResponse; import com.google.showcase.v1beta1.ListRoomsRequest; import com.google.showcase.v1beta1.ListRoomsResponse; import com.google.showcase.v1beta1.Room; import com.google.showcase.v1beta1.SearchBlurbsMetadata; import com.google.showcase.v1beta1.SearchBlurbsRequest; import com.google.showcase.v1beta1.SearchBlurbsResponse; import com.google.showcase.v1beta1.SendBlurbsResponse; import com.google.showcase.v1beta1.StreamBlurbsRequest; import com.google.showcase.v1beta1.StreamBlurbsResponse; import com.google.showcase.v1beta1.UpdateBlurbRequest; import com.google.showcase.v1beta1.UpdateRoomRequest; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * gRPC stub implementation for the Messaging service API. * * <p>This class is for advanced usage and reflects the underlying API directly. 
*/ @BetaApi @Generated("by gapic-generator-java") public class GrpcMessagingStub extends MessagingStub { private static final MethodDescriptor<CreateRoomRequest, Room> createRoomMethodDescriptor = MethodDescriptor.<CreateRoomRequest, Room>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/CreateRoom") .setRequestMarshaller(ProtoUtils.marshaller(CreateRoomRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Room.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetRoomRequest, Room> getRoomMethodDescriptor = MethodDescriptor.<GetRoomRequest, Room>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/GetRoom") .setRequestMarshaller(ProtoUtils.marshaller(GetRoomRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Room.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<UpdateRoomRequest, Room> updateRoomMethodDescriptor = MethodDescriptor.<UpdateRoomRequest, Room>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/UpdateRoom") .setRequestMarshaller(ProtoUtils.marshaller(UpdateRoomRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Room.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<DeleteRoomRequest, Empty> deleteRoomMethodDescriptor = MethodDescriptor.<DeleteRoomRequest, Empty>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/DeleteRoom") .setRequestMarshaller(ProtoUtils.marshaller(DeleteRoomRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListRoomsRequest, ListRoomsResponse> 
listRoomsMethodDescriptor = MethodDescriptor.<ListRoomsRequest, ListRoomsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/ListRooms") .setRequestMarshaller(ProtoUtils.marshaller(ListRoomsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ListRoomsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<CreateBlurbRequest, Blurb> createBlurbMethodDescriptor = MethodDescriptor.<CreateBlurbRequest, Blurb>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/CreateBlurb") .setRequestMarshaller(ProtoUtils.marshaller(CreateBlurbRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Blurb.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetBlurbRequest, Blurb> getBlurbMethodDescriptor = MethodDescriptor.<GetBlurbRequest, Blurb>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/GetBlurb") .setRequestMarshaller(ProtoUtils.marshaller(GetBlurbRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Blurb.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<UpdateBlurbRequest, Blurb> updateBlurbMethodDescriptor = MethodDescriptor.<UpdateBlurbRequest, Blurb>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/UpdateBlurb") .setRequestMarshaller(ProtoUtils.marshaller(UpdateBlurbRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Blurb.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<DeleteBlurbRequest, Empty> deleteBlurbMethodDescriptor = MethodDescriptor.<DeleteBlurbRequest, Empty>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) 
.setFullMethodName("google.showcase.v1beta1.Messaging/DeleteBlurb") .setRequestMarshaller(ProtoUtils.marshaller(DeleteBlurbRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListBlurbsRequest, ListBlurbsResponse> listBlurbsMethodDescriptor = MethodDescriptor.<ListBlurbsRequest, ListBlurbsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/ListBlurbs") .setRequestMarshaller(ProtoUtils.marshaller(ListBlurbsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ListBlurbsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<SearchBlurbsRequest, Operation> searchBlurbsMethodDescriptor = MethodDescriptor.<SearchBlurbsRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.showcase.v1beta1.Messaging/SearchBlurbs") .setRequestMarshaller(ProtoUtils.marshaller(SearchBlurbsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<StreamBlurbsRequest, StreamBlurbsResponse> streamBlurbsMethodDescriptor = MethodDescriptor.<StreamBlurbsRequest, StreamBlurbsResponse>newBuilder() .setType(MethodDescriptor.MethodType.SERVER_STREAMING) .setFullMethodName("google.showcase.v1beta1.Messaging/StreamBlurbs") .setRequestMarshaller(ProtoUtils.marshaller(StreamBlurbsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(StreamBlurbsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<CreateBlurbRequest, SendBlurbsResponse> sendBlurbsMethodDescriptor = MethodDescriptor.<CreateBlurbRequest, SendBlurbsResponse>newBuilder() 
.setType(MethodDescriptor.MethodType.CLIENT_STREAMING) .setFullMethodName("google.showcase.v1beta1.Messaging/SendBlurbs") .setRequestMarshaller(ProtoUtils.marshaller(CreateBlurbRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(SendBlurbsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ConnectRequest, StreamBlurbsResponse> connectMethodDescriptor = MethodDescriptor.<ConnectRequest, StreamBlurbsResponse>newBuilder() .setType(MethodDescriptor.MethodType.BIDI_STREAMING) .setFullMethodName("google.showcase.v1beta1.Messaging/Connect") .setRequestMarshaller(ProtoUtils.marshaller(ConnectRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(StreamBlurbsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse> listLocationsMethodDescriptor = MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.location.Locations/ListLocations") .setRequestMarshaller( ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor = MethodDescriptor.<GetLocationRequest, Location>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.location.Locations/GetLocation") .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor = MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder() 
.setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.iam.v1.IAMPolicy/SetIamPolicy") .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor = MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.iam.v1.IAMPolicy/GetIamPolicy") .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsMethodDescriptor = MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.iam.v1.IAMPolicy/TestIamPermissions") .setRequestMarshaller( ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private final UnaryCallable<CreateRoomRequest, Room> createRoomCallable; private final UnaryCallable<GetRoomRequest, Room> getRoomCallable; private final UnaryCallable<UpdateRoomRequest, Room> updateRoomCallable; private final UnaryCallable<DeleteRoomRequest, Empty> deleteRoomCallable; private final UnaryCallable<ListRoomsRequest, ListRoomsResponse> listRoomsCallable; private final UnaryCallable<ListRoomsRequest, ListRoomsPagedResponse> listRoomsPagedCallable; private final UnaryCallable<CreateBlurbRequest, Blurb> createBlurbCallable; private final UnaryCallable<GetBlurbRequest, Blurb> getBlurbCallable; private final UnaryCallable<UpdateBlurbRequest, Blurb> 
updateBlurbCallable; private final UnaryCallable<DeleteBlurbRequest, Empty> deleteBlurbCallable; private final UnaryCallable<ListBlurbsRequest, ListBlurbsResponse> listBlurbsCallable; private final UnaryCallable<ListBlurbsRequest, ListBlurbsPagedResponse> listBlurbsPagedCallable; private final UnaryCallable<SearchBlurbsRequest, Operation> searchBlurbsCallable; private final OperationCallable<SearchBlurbsRequest, SearchBlurbsResponse, SearchBlurbsMetadata> searchBlurbsOperationCallable; private final ServerStreamingCallable<StreamBlurbsRequest, StreamBlurbsResponse> streamBlurbsCallable; private final ClientStreamingCallable<CreateBlurbRequest, SendBlurbsResponse> sendBlurbsCallable; private final BidiStreamingCallable<ConnectRequest, StreamBlurbsResponse> connectCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable; private final UnaryCallable<GetLocationRequest, Location> getLocationCallable; private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable; private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable; private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable; private final BackgroundResource backgroundResources; private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcMessagingStub create(MessagingStubSettings settings) throws IOException { return new GrpcMessagingStub(settings, ClientContext.create(settings)); } public static final GrpcMessagingStub create(ClientContext clientContext) throws IOException { return new GrpcMessagingStub(MessagingStubSettings.newBuilder().build(), clientContext); } public static final GrpcMessagingStub create( ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { return new 
GrpcMessagingStub( MessagingStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of GrpcMessagingStub, using the given settings. This is protected so * that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected GrpcMessagingStub(MessagingStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcMessagingCallableFactory()); } /** * Constructs an instance of GrpcMessagingStub, using the given settings. This is protected so * that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected GrpcMessagingStub( MessagingStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings<CreateRoomRequest, Room> createRoomTransportSettings = GrpcCallSettings.<CreateRoomRequest, Room>newBuilder() .setMethodDescriptor(createRoomMethodDescriptor) .build(); GrpcCallSettings<GetRoomRequest, Room> getRoomTransportSettings = GrpcCallSettings.<GetRoomRequest, Room>newBuilder() .setMethodDescriptor(getRoomMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<UpdateRoomRequest, Room> updateRoomTransportSettings = GrpcCallSettings.<UpdateRoomRequest, Room>newBuilder() .setMethodDescriptor(updateRoomMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("room.name", String.valueOf(request.getRoom().getName())); return builder.build(); }) .build(); GrpcCallSettings<DeleteRoomRequest, Empty> deleteRoomTransportSettings = GrpcCallSettings.<DeleteRoomRequest, 
Empty>newBuilder() .setMethodDescriptor(deleteRoomMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<ListRoomsRequest, ListRoomsResponse> listRoomsTransportSettings = GrpcCallSettings.<ListRoomsRequest, ListRoomsResponse>newBuilder() .setMethodDescriptor(listRoomsMethodDescriptor) .build(); GrpcCallSettings<CreateBlurbRequest, Blurb> createBlurbTransportSettings = GrpcCallSettings.<CreateBlurbRequest, Blurb>newBuilder() .setMethodDescriptor(createBlurbMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<GetBlurbRequest, Blurb> getBlurbTransportSettings = GrpcCallSettings.<GetBlurbRequest, Blurb>newBuilder() .setMethodDescriptor(getBlurbMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<UpdateBlurbRequest, Blurb> updateBlurbTransportSettings = GrpcCallSettings.<UpdateBlurbRequest, Blurb>newBuilder() .setMethodDescriptor(updateBlurbMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("blurb.name", String.valueOf(request.getBlurb().getName())); return builder.build(); }) .build(); GrpcCallSettings<DeleteBlurbRequest, Empty> deleteBlurbTransportSettings = GrpcCallSettings.<DeleteBlurbRequest, Empty>newBuilder() .setMethodDescriptor(deleteBlurbMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<ListBlurbsRequest, 
ListBlurbsResponse> listBlurbsTransportSettings = GrpcCallSettings.<ListBlurbsRequest, ListBlurbsResponse>newBuilder() .setMethodDescriptor(listBlurbsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<SearchBlurbsRequest, Operation> searchBlurbsTransportSettings = GrpcCallSettings.<SearchBlurbsRequest, Operation>newBuilder() .setMethodDescriptor(searchBlurbsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<StreamBlurbsRequest, StreamBlurbsResponse> streamBlurbsTransportSettings = GrpcCallSettings.<StreamBlurbsRequest, StreamBlurbsResponse>newBuilder() .setMethodDescriptor(streamBlurbsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<CreateBlurbRequest, SendBlurbsResponse> sendBlurbsTransportSettings = GrpcCallSettings.<CreateBlurbRequest, SendBlurbsResponse>newBuilder() .setMethodDescriptor(sendBlurbsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<ConnectRequest, StreamBlurbsResponse> connectTransportSettings = GrpcCallSettings.<ConnectRequest, StreamBlurbsResponse>newBuilder() .setMethodDescriptor(connectMethodDescriptor) .build(); GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings = GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setMethodDescriptor(listLocationsMethodDescriptor) 
.setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings = GrpcCallSettings.<GetLocationRequest, Location>newBuilder() .setMethodDescriptor(getLocationMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings = GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder() .setMethodDescriptor(setIamPolicyMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings = GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder() .setMethodDescriptor(getIamPolicyMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsTransportSettings = GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder() .setMethodDescriptor(testIamPermissionsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); this.createRoomCallable = callableFactory.createUnaryCallable( createRoomTransportSettings, settings.createRoomSettings(), clientContext); this.getRoomCallable = callableFactory.createUnaryCallable( 
getRoomTransportSettings, settings.getRoomSettings(), clientContext); this.updateRoomCallable = callableFactory.createUnaryCallable( updateRoomTransportSettings, settings.updateRoomSettings(), clientContext); this.deleteRoomCallable = callableFactory.createUnaryCallable( deleteRoomTransportSettings, settings.deleteRoomSettings(), clientContext); this.listRoomsCallable = callableFactory.createUnaryCallable( listRoomsTransportSettings, settings.listRoomsSettings(), clientContext); this.listRoomsPagedCallable = callableFactory.createPagedCallable( listRoomsTransportSettings, settings.listRoomsSettings(), clientContext); this.createBlurbCallable = callableFactory.createUnaryCallable( createBlurbTransportSettings, settings.createBlurbSettings(), clientContext); this.getBlurbCallable = callableFactory.createUnaryCallable( getBlurbTransportSettings, settings.getBlurbSettings(), clientContext); this.updateBlurbCallable = callableFactory.createUnaryCallable( updateBlurbTransportSettings, settings.updateBlurbSettings(), clientContext); this.deleteBlurbCallable = callableFactory.createUnaryCallable( deleteBlurbTransportSettings, settings.deleteBlurbSettings(), clientContext); this.listBlurbsCallable = callableFactory.createUnaryCallable( listBlurbsTransportSettings, settings.listBlurbsSettings(), clientContext); this.listBlurbsPagedCallable = callableFactory.createPagedCallable( listBlurbsTransportSettings, settings.listBlurbsSettings(), clientContext); this.searchBlurbsCallable = callableFactory.createUnaryCallable( searchBlurbsTransportSettings, settings.searchBlurbsSettings(), clientContext); this.searchBlurbsOperationCallable = callableFactory.createOperationCallable( searchBlurbsTransportSettings, settings.searchBlurbsOperationSettings(), clientContext, operationsStub); this.streamBlurbsCallable = callableFactory.createServerStreamingCallable( streamBlurbsTransportSettings, settings.streamBlurbsSettings(), clientContext); this.sendBlurbsCallable = 
callableFactory.createClientStreamingCallable( sendBlurbsTransportSettings, settings.sendBlurbsSettings(), clientContext); this.connectCallable = callableFactory.createBidiStreamingCallable( connectTransportSettings, settings.connectSettings(), clientContext); this.listLocationsCallable = callableFactory.createUnaryCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.listLocationsPagedCallable = callableFactory.createPagedCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.getLocationCallable = callableFactory.createUnaryCallable( getLocationTransportSettings, settings.getLocationSettings(), clientContext); this.setIamPolicyCallable = callableFactory.createUnaryCallable( setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext); this.getIamPolicyCallable = callableFactory.createUnaryCallable( getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext); this.testIamPermissionsCallable = callableFactory.createUnaryCallable( testIamPermissionsTransportSettings, settings.testIamPermissionsSettings(), clientContext); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } public GrpcOperationsStub getOperationsStub() { return operationsStub; } @Override public UnaryCallable<CreateRoomRequest, Room> createRoomCallable() { return createRoomCallable; } @Override public UnaryCallable<GetRoomRequest, Room> getRoomCallable() { return getRoomCallable; } @Override public UnaryCallable<UpdateRoomRequest, Room> updateRoomCallable() { return updateRoomCallable; } @Override public UnaryCallable<DeleteRoomRequest, Empty> deleteRoomCallable() { return deleteRoomCallable; } @Override public UnaryCallable<ListRoomsRequest, ListRoomsResponse> listRoomsCallable() { return listRoomsCallable; } @Override public UnaryCallable<ListRoomsRequest, ListRoomsPagedResponse> listRoomsPagedCallable() { return 
listRoomsPagedCallable; } @Override public UnaryCallable<CreateBlurbRequest, Blurb> createBlurbCallable() { return createBlurbCallable; } @Override public UnaryCallable<GetBlurbRequest, Blurb> getBlurbCallable() { return getBlurbCallable; } @Override public UnaryCallable<UpdateBlurbRequest, Blurb> updateBlurbCallable() { return updateBlurbCallable; } @Override public UnaryCallable<DeleteBlurbRequest, Empty> deleteBlurbCallable() { return deleteBlurbCallable; } @Override public UnaryCallable<ListBlurbsRequest, ListBlurbsResponse> listBlurbsCallable() { return listBlurbsCallable; } @Override public UnaryCallable<ListBlurbsRequest, ListBlurbsPagedResponse> listBlurbsPagedCallable() { return listBlurbsPagedCallable; } @Override public UnaryCallable<SearchBlurbsRequest, Operation> searchBlurbsCallable() { return searchBlurbsCallable; } @Override public OperationCallable<SearchBlurbsRequest, SearchBlurbsResponse, SearchBlurbsMetadata> searchBlurbsOperationCallable() { return searchBlurbsOperationCallable; } @Override public ServerStreamingCallable<StreamBlurbsRequest, StreamBlurbsResponse> streamBlurbsCallable() { return streamBlurbsCallable; } @Override public ClientStreamingCallable<CreateBlurbRequest, SendBlurbsResponse> sendBlurbsCallable() { return sendBlurbsCallable; } @Override public BidiStreamingCallable<ConnectRequest, StreamBlurbsResponse> connectCallable() { return connectCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() { return listLocationsCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable() { return listLocationsPagedCallable; } @Override public UnaryCallable<GetLocationRequest, Location> getLocationCallable() { return getLocationCallable; } @Override public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() { return setIamPolicyCallable; } @Override public UnaryCallable<GetIamPolicyRequest, Policy> 
getIamPolicyCallable() { return getIamPolicyCallable; } @Override public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable() { return testIamPermissionsCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
apache/falcon
35,317
falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.regression; import org.apache.falcon.regression.Entities.FeedMerlin; import org.apache.falcon.regression.core.bundle.Bundle; import org.apache.falcon.entity.v0.feed.ActionType; import org.apache.falcon.entity.v0.feed.ClusterType; import org.apache.falcon.regression.core.helpers.ColoHelper; import org.apache.falcon.regression.core.response.ServiceResponse; import org.apache.falcon.regression.core.util.AssertUtil; import org.apache.falcon.regression.core.util.BundleUtil; import org.apache.falcon.regression.core.util.HadoopUtil; import org.apache.falcon.regression.core.util.OSUtil; import org.apache.falcon.regression.core.util.OozieUtil; import org.apache.falcon.regression.core.util.TimeUtil; import org.apache.falcon.regression.core.util.Util; import org.apache.falcon.regression.core.util.XmlUtil; import org.apache.falcon.regression.testHelper.BaseTestClass; import org.apache.hadoop.fs.FileSystem; import org.apache.log4j.Logger; import org.apache.oozie.client.OozieClient; import org.testng.Assert; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; /** * Feed 
cluster update tests. */ @Test(groups = "distributed") public class FeedClusterUpdateTest extends BaseTestClass { private String baseTestDir = cleanAndGetTestDir(); private String aggregateWorkflowDir = baseTestDir + "/aggregator"; private ColoHelper cluster1 = servers.get(0); private ColoHelper cluster2 = servers.get(1); private ColoHelper cluster3 = servers.get(2); private OozieClient cluster1OC = serverOC.get(0); private OozieClient cluster2OC = serverOC.get(1); private OozieClient cluster3OC = serverOC.get(2); private FileSystem cluster2FS = serverFS.get(1); private FileSystem cluster3FS = serverFS.get(2); private String feed; private String feedName; private String startTime; private String feedOriginalSubmit; private String feedUpdated; private String cluster1Name; private String cluster2Name; private String cluster3Name; private static final Logger LOGGER = Logger.getLogger(FeedClusterUpdateTest.class); @BeforeClass(alwaysRun = true) public void createTestData() throws Exception { uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE); Bundle bundle = BundleUtil.readELBundle(); for (int i = 0; i < 3; i++) { bundles[i] = new Bundle(bundle, servers.get(i)); bundles[i].generateUniqueBundle(this); bundles[i].setProcessWorkflow(aggregateWorkflowDir); } try { String postFix = "/US/" + servers.get(1).getClusterHelper().getColoName(); HadoopUtil.deleteDirIfExists(baseTestDir, cluster2FS); HadoopUtil.lateDataReplenish(cluster2FS, 80, 1, baseTestDir, postFix); postFix = "/UK/" + servers.get(2).getClusterHelper().getColoName(); HadoopUtil.deleteDirIfExists(baseTestDir, cluster3FS); HadoopUtil.lateDataReplenish(cluster3FS, 80, 1, baseTestDir, postFix); } finally { removeTestClassEntities(); } } @BeforeMethod(alwaysRun = true) public void setup() throws Exception { Bundle bundle = BundleUtil.readELBundle(); for (int i = 0; i < 3; i++) { bundles[i] = new Bundle(bundle, servers.get(i)); bundles[i].generateUniqueBundle(this); 
bundles[i].setProcessWorkflow(aggregateWorkflowDir); } BundleUtil.submitAllClusters(prism, bundles[0], bundles[1], bundles[2]); feed = bundles[0].getDataSets().get(0); feed = FeedMerlin.fromString(feed).clearFeedClusters().toString(); startTime = TimeUtil.getTimeWrtSystemTime(-50); feedName = Util.readEntityName(feed); cluster1Name = Util.readEntityName(bundles[0].getClusters().get(0)); cluster2Name = Util.readEntityName(bundles[1].getClusters().get(0)); cluster3Name = Util.readEntityName(bundles[2].getClusters().get(0)); } @AfterMethod(alwaysRun = true) public void tearDown() { removeTestClassEntities(); } @Test(enabled = true, groups = {"multiCluster"}) public void addSourceCluster() throws Exception { //add one source and one target , schedule only on source feedOriginalSubmit = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit)); ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit); TimeUtil.sleepSeconds(10); AssertUtil.assertSucceeded(response); //schedule on source response = cluster2.getFeedHelper().schedule(feedOriginalSubmit); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, 
feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 0); //prepare updated Feed feedUpdated = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); response = prism.getFeedHelper().update(feedUpdated, feedUpdated); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); prism.getFeedHelper().submitAndSchedule(feedUpdated); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 2); 
Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); } @Test(enabled = true, groups = {"multiCluster"}) public void addTargetCluster() throws Exception { //add one source and one target , schedule only on source feedOriginalSubmit = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit)); ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit); TimeUtil.sleepSeconds(10); AssertUtil.assertSucceeded(response); //schedule on source response = cluster2.getFeedHelper().schedule(feedOriginalSubmit); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 0); //prepare updated Feed feedUpdated = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) 
.withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .withPartition("US/${cluster.colo}") .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated)); response = prism.getFeedHelper().update(feedUpdated, feedUpdated); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); prism.getFeedHelper().submitAndSchedule(feedUpdated); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); } @Test(enabled = true, groups = {"multiCluster"}) public void add2SourceCluster() throws Exception { //add one source , schedule only on source feedOriginalSubmit = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 
65)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit)); ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit); TimeUtil.sleepSeconds(10); AssertUtil.assertSucceeded(response); //schedule on source response = cluster2.getFeedHelper().schedule(feedOriginalSubmit); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 0); //prepare updated Feed feedUpdated = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .withPartition("US/${cluster.colo}") .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); 
LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated)); response = prism.getFeedHelper().update(feedUpdated, feedUpdated); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); prism.getFeedHelper().submitAndSchedule(feedUpdated); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); } @Test(enabled = true, groups = {"multiCluster"}) public void add2TargetCluster() throws Exception { //add one source and one target , schedule only on source feedOriginalSubmit = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit)); ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit); TimeUtil.sleepSeconds(10); AssertUtil.assertSucceeded(response); //schedule on source response = cluster2.getFeedHelper().schedule(feedOriginalSubmit); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 0); 
Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 0); //prepare updated Feed feedUpdated = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.TARGET) .build()) .toString(); LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated)); response = prism.getFeedHelper().update(feedUpdated, feedUpdated); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); prism.getFeedHelper().submitAndSchedule(feedUpdated); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); } @Test(enabled = true, groups = {"multiCluster"}) public void 
add1Source1TargetCluster() throws Exception { //add one source and one target , schedule only on source feedOriginalSubmit = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit)); ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit); TimeUtil.sleepSeconds(10); AssertUtil.assertSucceeded(response); //schedule on source response = cluster2.getFeedHelper().schedule(feedOriginalSubmit); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 0); //prepare updated Feed feedUpdated = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .withPartition("US/${cluster.colo}") .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); feedUpdated = 
FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated)); response = prism.getFeedHelper().update(feedUpdated, feedUpdated); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); prism.getFeedHelper().submitAndSchedule(feedUpdated); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); } @Test(enabled = true, groups = {"multiCluster"}) public void deleteSourceCluster() throws Exception { //add one source and one target , schedule only on source feedOriginalSubmit = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .withPartition("US/${cluster.colo}") .build()) .toString(); feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); feedOriginalSubmit = 
FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit)); ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit); TimeUtil.sleepSeconds(10); AssertUtil.assertSucceeded(response); //schedule on source response = prism.getFeedHelper().schedule(feedOriginalSubmit); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); //prepare updated Feed feedUpdated = FeedMerlin.fromString(feed) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .build()) .toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); response = prism.getFeedHelper().update(feedUpdated, feedUpdated); TimeUtil.sleepSeconds(20); 
AssertUtil.assertSucceeded(response); response = cluster3.getFeedHelper().getEntityDefinition(feedUpdated); AssertUtil.assertFailed(response); prism.getFeedHelper().submitAndSchedule(feedUpdated); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 3); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 2); } @Test(enabled = true, groups = {"multiCluster"}) public void deleteTargetCluster() throws Exception { /* this test creates a multiCluster feed. Cluster1 is the target cluster and cluster3 and Cluster2 are the source cluster. feed is submitted through prism so submitted to both target and source. Feed is scheduled through prism, so only on Cluster3 and Cluster2 retention coord should exists. Cluster1 one which is target both retention and replication coord should exists. there will be 2 replication coord, one each for each source cluster. then we update feed by deleting cluster1 and cluster2 from the feed xml and send update request. Once update is over. 
definition should go missing from cluster1 and cluster2 and prism and cluster3 should have new def there should be a new retention coord on cluster3 and old number of coord on cluster1 and cluster2 */ //add two source and one target feedOriginalSubmit = FeedMerlin.fromString(feed).clearFeedClusters().toString(); feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit) .addFeedCluster(new FeedMerlin.FeedClusterBuilder(cluster2Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65)) .withClusterType(ClusterType.SOURCE) .withPartition("US/${cluster.colo}") .build()) .toString(); feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster1Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 20), TimeUtil.addMinsToTime(startTime, 85)) .withClusterType(ClusterType.TARGET) .build()) .toString(); feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit)); ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit); TimeUtil.sleepSeconds(10); AssertUtil.assertSucceeded(response); //schedule on source response = prism.getFeedHelper().schedule(feedOriginalSubmit); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); 
Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); //prepare updated Feed feedUpdated = FeedMerlin.fromString(feed).clearFeedClusters().toString(); feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster( new FeedMerlin.FeedClusterBuilder(cluster3Name) .withRetention("hours(10)", ActionType.DELETE) .withValidity(TimeUtil.addMinsToTime(startTime, 40), TimeUtil.addMinsToTime(startTime, 110)) .withClusterType(ClusterType.SOURCE) .withPartition("UK/${cluster.colo}") .build()) .toString(); LOGGER.info("Feed: " + Util.prettyPrintXml(feedUpdated)); response = prism.getFeedHelper().update(feedUpdated, feedUpdated); TimeUtil.sleepSeconds(20); AssertUtil.assertSucceeded(response); //verify xmls definitions response = cluster1.getFeedHelper().getEntityDefinition(feedUpdated); AssertUtil.assertFailed(response); response = cluster2.getFeedHelper().getEntityDefinition(feedUpdated); AssertUtil.assertFailed(response); response = cluster3.getFeedHelper().getEntityDefinition(feedUpdated); Assert.assertTrue(XmlUtil.isIdentical(feedUpdated, response.getMessage())); response = prism.getFeedHelper().getEntityDefinition(feedUpdated); Assert.assertTrue(XmlUtil.isIdentical(feedUpdated, response.getMessage())); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "REPLICATION"), 0); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster3OC, feedName, "RETENTION"), 1); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "REPLICATION"), 2); Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster1OC, feedName, "RETENTION"), 1); } 
/* @Test(enabled = false) public void delete2SourceCluster() { } @Test(enabled = false) public void delete2TargetCluster() { } @Test(enabled = false) public void delete1Source1TargetCluster() { } */ }
googleapis/google-cloud-java
35,105
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/UpdateHomepageRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1/homepage.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1; /** * * * <pre> * Request message for the `UpdateHomepage` method. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateHomepageRequest} */ public final class UpdateHomepageRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.UpdateHomepageRequest) UpdateHomepageRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateHomepageRequest.newBuilder() to construct. 
private UpdateHomepageRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateHomepageRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateHomepageRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.HomepageProto .internal_static_google_shopping_merchant_accounts_v1_UpdateHomepageRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1.HomepageProto .internal_static_google_shopping_merchant_accounts_v1_UpdateHomepageRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.class, com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.Builder.class); } private int bitField0_; public static final int HOMEPAGE_FIELD_NUMBER = 1; private com.google.shopping.merchant.accounts.v1.Homepage homepage_; /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the homepage field is set. */ @java.lang.Override public boolean hasHomepage() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The homepage. */ @java.lang.Override public com.google.shopping.merchant.accounts.v1.Homepage getHomepage() { return homepage_ == null ? com.google.shopping.merchant.accounts.v1.Homepage.getDefaultInstance() : homepage_; } /** * * * <pre> * Required. The new version of the homepage. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.shopping.merchant.accounts.v1.HomepageOrBuilder getHomepageOrBuilder() { return homepage_ == null ? com.google.shopping.merchant.accounts.v1.Homepage.getDefaultInstance() : homepage_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getHomepage()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getHomepage()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest other = (com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest) obj; if (hasHomepage() != other.hasHomepage()) return false; if (hasHomepage()) { if (!getHomepage().equals(other.getHomepage())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasHomepage()) { hash = (37 * hash) + HOMEPAGE_FIELD_NUMBER; hash = (53 * hash) + getHomepage().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `UpdateHomepage` method. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateHomepageRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.UpdateHomepageRequest) com.google.shopping.merchant.accounts.v1.UpdateHomepageRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.HomepageProto .internal_static_google_shopping_merchant_accounts_v1_UpdateHomepageRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1.HomepageProto .internal_static_google_shopping_merchant_accounts_v1_UpdateHomepageRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.class, com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.Builder.class); } // Construct using com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getHomepageFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; homepage_ = null; if (homepageBuilder_ != null) { homepageBuilder_.dispose(); 
homepageBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1.HomepageProto .internal_static_google_shopping_merchant_accounts_v1_UpdateHomepageRequest_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest build() { com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest buildPartial() { com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest result = new com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.homepage_ = homepageBuilder_ == null ? homepage_ : homepageBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest) { return mergeFrom((com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest other) { if (other == com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.getDefaultInstance()) return this; if (other.hasHomepage()) { mergeHomepage(other.getHomepage()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getHomepageFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.shopping.merchant.accounts.v1.Homepage homepage_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1.Homepage, com.google.shopping.merchant.accounts.v1.Homepage.Builder, com.google.shopping.merchant.accounts.v1.HomepageOrBuilder> homepageBuilder_; /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the homepage field is set. */ public boolean hasHomepage() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The homepage. */ public com.google.shopping.merchant.accounts.v1.Homepage getHomepage() { if (homepageBuilder_ == null) { return homepage_ == null ? com.google.shopping.merchant.accounts.v1.Homepage.getDefaultInstance() : homepage_; } else { return homepageBuilder_.getMessage(); } } /** * * * <pre> * Required. The new version of the homepage. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setHomepage(com.google.shopping.merchant.accounts.v1.Homepage value) { if (homepageBuilder_ == null) { if (value == null) { throw new NullPointerException(); } homepage_ = value; } else { homepageBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setHomepage( com.google.shopping.merchant.accounts.v1.Homepage.Builder builderForValue) { if (homepageBuilder_ == null) { homepage_ = builderForValue.build(); } else { homepageBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeHomepage(com.google.shopping.merchant.accounts.v1.Homepage value) { if (homepageBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && homepage_ != null && homepage_ != com.google.shopping.merchant.accounts.v1.Homepage.getDefaultInstance()) { getHomepageBuilder().mergeFrom(value); } else { homepage_ = value; } } else { homepageBuilder_.mergeFrom(value); } if (homepage_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The new version of the homepage. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearHomepage() { bitField0_ = (bitField0_ & ~0x00000001); homepage_ = null; if (homepageBuilder_ != null) { homepageBuilder_.dispose(); homepageBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.accounts.v1.Homepage.Builder getHomepageBuilder() { bitField0_ |= 0x00000001; onChanged(); return getHomepageFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The new version of the homepage. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.accounts.v1.HomepageOrBuilder getHomepageOrBuilder() { if (homepageBuilder_ != null) { return homepageBuilder_.getMessageOrBuilder(); } else { return homepage_ == null ? com.google.shopping.merchant.accounts.v1.Homepage.getDefaultInstance() : homepage_; } } /** * * * <pre> * Required. The new version of the homepage. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1.Homepage homepage = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1.Homepage, com.google.shopping.merchant.accounts.v1.Homepage.Builder, com.google.shopping.merchant.accounts.v1.HomepageOrBuilder> getHomepageFieldBuilder() { if (homepageBuilder_ == null) { homepageBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1.Homepage, com.google.shopping.merchant.accounts.v1.Homepage.Builder, com.google.shopping.merchant.accounts.v1.HomepageOrBuilder>( getHomepage(), getParentForChildren(), isClean()); homepage_ = null; } return homepageBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. List of fields being updated. 
* * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. List of fields being updated. 
* * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. List of fields being updated. * * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. List of fields being updated. 
* * The following fields are supported (in both `snake_case` and * `lowerCamelCase`): * * - `uri` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.UpdateHomepageRequest) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.UpdateHomepageRequest) private static final com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest(); } public static com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateHomepageRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateHomepageRequest>() { @java.lang.Override public UpdateHomepageRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); 
try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateHomepageRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateHomepageRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,221
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/VertexDatasetRegexes.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * A collection of regular expressions to determine what datasets to match * against. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.VertexDatasetRegexes} */ public final class VertexDatasetRegexes extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.VertexDatasetRegexes) VertexDatasetRegexesOrBuilder { private static final long serialVersionUID = 0L; // Use VertexDatasetRegexes.newBuilder() to construct. 
private VertexDatasetRegexes(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private VertexDatasetRegexes() { patterns_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new VertexDatasetRegexes(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_VertexDatasetRegexes_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_VertexDatasetRegexes_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.VertexDatasetRegexes.class, com.google.privacy.dlp.v2.VertexDatasetRegexes.Builder.class); } public static final int PATTERNS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.privacy.dlp.v2.VertexDatasetRegex> patterns_; /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<com.google.privacy.dlp.v2.VertexDatasetRegex> getPatternsList() { return patterns_; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<? 
extends com.google.privacy.dlp.v2.VertexDatasetRegexOrBuilder> getPatternsOrBuilderList() { return patterns_; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public int getPatternsCount() { return patterns_.size(); } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.privacy.dlp.v2.VertexDatasetRegex getPatterns(int index) { return patterns_.get(index); } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. 
* </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.privacy.dlp.v2.VertexDatasetRegexOrBuilder getPatternsOrBuilder(int index) { return patterns_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < patterns_.size(); i++) { output.writeMessage(1, patterns_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < patterns_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, patterns_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.VertexDatasetRegexes)) { return super.equals(obj); } com.google.privacy.dlp.v2.VertexDatasetRegexes other = (com.google.privacy.dlp.v2.VertexDatasetRegexes) obj; if (!getPatternsList().equals(other.getPatternsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPatternsCount() > 0) { hash = (37 * hash) + PATTERNS_FIELD_NUMBER; hash = (53 * hash) + getPatternsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseDelimitedFrom( java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2.VertexDatasetRegexes prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A collection of regular expressions to determine what datasets to match * against. 
* </pre> * * Protobuf type {@code google.privacy.dlp.v2.VertexDatasetRegexes} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.VertexDatasetRegexes) com.google.privacy.dlp.v2.VertexDatasetRegexesOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_VertexDatasetRegexes_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_VertexDatasetRegexes_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.VertexDatasetRegexes.class, com.google.privacy.dlp.v2.VertexDatasetRegexes.Builder.class); } // Construct using com.google.privacy.dlp.v2.VertexDatasetRegexes.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (patternsBuilder_ == null) { patterns_ = java.util.Collections.emptyList(); } else { patterns_ = null; patternsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_VertexDatasetRegexes_descriptor; } @java.lang.Override public com.google.privacy.dlp.v2.VertexDatasetRegexes getDefaultInstanceForType() { return com.google.privacy.dlp.v2.VertexDatasetRegexes.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.VertexDatasetRegexes build() { com.google.privacy.dlp.v2.VertexDatasetRegexes result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.privacy.dlp.v2.VertexDatasetRegexes buildPartial() { com.google.privacy.dlp.v2.VertexDatasetRegexes result = new com.google.privacy.dlp.v2.VertexDatasetRegexes(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.privacy.dlp.v2.VertexDatasetRegexes result) { if (patternsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { patterns_ = java.util.Collections.unmodifiableList(patterns_); bitField0_ = (bitField0_ & ~0x00000001); } result.patterns_ = patterns_; } else { result.patterns_ = patternsBuilder_.build(); } } private void buildPartial0(com.google.privacy.dlp.v2.VertexDatasetRegexes result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.VertexDatasetRegexes) { return mergeFrom((com.google.privacy.dlp.v2.VertexDatasetRegexes) other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2.VertexDatasetRegexes other) { if (other == com.google.privacy.dlp.v2.VertexDatasetRegexes.getDefaultInstance()) return this; if (patternsBuilder_ == null) { if (!other.patterns_.isEmpty()) { if (patterns_.isEmpty()) { patterns_ = other.patterns_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePatternsIsMutable(); patterns_.addAll(other.patterns_); } onChanged(); } } else { if (!other.patterns_.isEmpty()) { if (patternsBuilder_.isEmpty()) { patternsBuilder_.dispose(); patternsBuilder_ = null; patterns_ = other.patterns_; bitField0_ = (bitField0_ & ~0x00000001); patternsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getPatternsFieldBuilder() : null; } else { patternsBuilder_.addAllMessages(other.patterns_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.privacy.dlp.v2.VertexDatasetRegex m = input.readMessage( com.google.privacy.dlp.v2.VertexDatasetRegex.parser(), extensionRegistry); if (patternsBuilder_ == null) { ensurePatternsIsMutable(); patterns_.add(m); } else { patternsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int 
bitField0_; private java.util.List<com.google.privacy.dlp.v2.VertexDatasetRegex> patterns_ = java.util.Collections.emptyList(); private void ensurePatternsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { patterns_ = new java.util.ArrayList<com.google.privacy.dlp.v2.VertexDatasetRegex>(patterns_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.privacy.dlp.v2.VertexDatasetRegex, com.google.privacy.dlp.v2.VertexDatasetRegex.Builder, com.google.privacy.dlp.v2.VertexDatasetRegexOrBuilder> patternsBuilder_; /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.privacy.dlp.v2.VertexDatasetRegex> getPatternsList() { if (patternsBuilder_ == null) { return java.util.Collections.unmodifiableList(patterns_); } else { return patternsBuilder_.getMessageList(); } } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public int getPatternsCount() { if (patternsBuilder_ == null) { return patterns_.size(); } else { return patternsBuilder_.getCount(); } } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. 
* </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.privacy.dlp.v2.VertexDatasetRegex getPatterns(int index) { if (patternsBuilder_ == null) { return patterns_.get(index); } else { return patternsBuilder_.getMessage(index); } } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setPatterns(int index, com.google.privacy.dlp.v2.VertexDatasetRegex value) { if (patternsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePatternsIsMutable(); patterns_.set(index, value); onChanged(); } else { patternsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setPatterns( int index, com.google.privacy.dlp.v2.VertexDatasetRegex.Builder builderForValue) { if (patternsBuilder_ == null) { ensurePatternsIsMutable(); patterns_.set(index, builderForValue.build()); onChanged(); } else { patternsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. 
* </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addPatterns(com.google.privacy.dlp.v2.VertexDatasetRegex value) { if (patternsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePatternsIsMutable(); patterns_.add(value); onChanged(); } else { patternsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addPatterns(int index, com.google.privacy.dlp.v2.VertexDatasetRegex value) { if (patternsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePatternsIsMutable(); patterns_.add(index, value); onChanged(); } else { patternsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addPatterns( com.google.privacy.dlp.v2.VertexDatasetRegex.Builder builderForValue) { if (patternsBuilder_ == null) { ensurePatternsIsMutable(); patterns_.add(builderForValue.build()); onChanged(); } else { patternsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. 
* </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addPatterns( int index, com.google.privacy.dlp.v2.VertexDatasetRegex.Builder builderForValue) { if (patternsBuilder_ == null) { ensurePatternsIsMutable(); patterns_.add(index, builderForValue.build()); onChanged(); } else { patternsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addAllPatterns( java.lang.Iterable<? extends com.google.privacy.dlp.v2.VertexDatasetRegex> values) { if (patternsBuilder_ == null) { ensurePatternsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, patterns_); onChanged(); } else { patternsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearPatterns() { if (patternsBuilder_ == null) { patterns_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { patternsBuilder_.clear(); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. 
* </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder removePatterns(int index) { if (patternsBuilder_ == null) { ensurePatternsIsMutable(); patterns_.remove(index); onChanged(); } else { patternsBuilder_.remove(index); } return this; } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.privacy.dlp.v2.VertexDatasetRegex.Builder getPatternsBuilder(int index) { return getPatternsFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.privacy.dlp.v2.VertexDatasetRegexOrBuilder getPatternsOrBuilder(int index) { if (patternsBuilder_ == null) { return patterns_.get(index); } else { return patternsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<? 
extends com.google.privacy.dlp.v2.VertexDatasetRegexOrBuilder> getPatternsOrBuilderList() { if (patternsBuilder_ != null) { return patternsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(patterns_); } } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.privacy.dlp.v2.VertexDatasetRegex.Builder addPatternsBuilder() { return getPatternsFieldBuilder() .addBuilder(com.google.privacy.dlp.v2.VertexDatasetRegex.getDefaultInstance()); } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. * </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.privacy.dlp.v2.VertexDatasetRegex.Builder addPatternsBuilder(int index) { return getPatternsFieldBuilder() .addBuilder(index, com.google.privacy.dlp.v2.VertexDatasetRegex.getDefaultInstance()); } /** * * * <pre> * Required. The group of regular expression patterns to match against one or * more datasets. Maximum of 100 entries. The sum of the lengths of all * regular expressions can't exceed 10 KiB. 
* </pre> * * <code> * repeated .google.privacy.dlp.v2.VertexDatasetRegex patterns = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.privacy.dlp.v2.VertexDatasetRegex.Builder> getPatternsBuilderList() { return getPatternsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.privacy.dlp.v2.VertexDatasetRegex, com.google.privacy.dlp.v2.VertexDatasetRegex.Builder, com.google.privacy.dlp.v2.VertexDatasetRegexOrBuilder> getPatternsFieldBuilder() { if (patternsBuilder_ == null) { patternsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.privacy.dlp.v2.VertexDatasetRegex, com.google.privacy.dlp.v2.VertexDatasetRegex.Builder, com.google.privacy.dlp.v2.VertexDatasetRegexOrBuilder>( patterns_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); patterns_ = null; } return patternsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.VertexDatasetRegexes) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.VertexDatasetRegexes) private static final com.google.privacy.dlp.v2.VertexDatasetRegexes DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.VertexDatasetRegexes(); } public static com.google.privacy.dlp.v2.VertexDatasetRegexes getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<VertexDatasetRegexes> PARSER = new com.google.protobuf.AbstractParser<VertexDatasetRegexes>() { @java.lang.Override public VertexDatasetRegexes parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<VertexDatasetRegexes> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<VertexDatasetRegexes> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.VertexDatasetRegexes getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,400
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/BackendServiceFailoverPolicy.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;

/**
 * Failover policy for a backend service.
 *
 * <pre>
 * For load balancers that have configurable failover: [Internal passthrough Network Load
 * Balancers](https://cloud.google.com/load-balancing/docs/internal/failover-overview) and
 * [external passthrough Network Load
 * Balancers](https://cloud.google.com/load-balancing/docs/network/networklb-failover-overview).
 * On failover or failback, this field indicates whether connection draining will be honored.
 * Google Cloud has a fixed connection draining timeout of 10 minutes. A setting of true
 * terminates existing TCP connections to the active pool during failover and failback,
 * immediately draining traffic. A setting of false allows existing TCP connections to persist,
 * even on VMs no longer in the active pool, for up to the duration of the connection draining
 * timeout (10 minutes).
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.BackendServiceFailoverPolicy}
 *
 * <p>All three fields are proto3 {@code optional}, so presence is tracked explicitly via
 * {@code bitField0_} (bit 0 = disable_connection_drain_on_failover, bit 1 =
 * drop_traffic_if_unhealthy, bit 2 = failover_ratio).
 */
public final class BackendServiceFailoverPolicy extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.BackendServiceFailoverPolicy)
    BackendServiceFailoverPolicyOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BackendServiceFailoverPolicy.newBuilder() to construct.
  private BackendServiceFailoverPolicy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BackendServiceFailoverPolicy() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BackendServiceFailoverPolicy();
  }

  /** Returns the message descriptor, resolved from the file-wide {@code Compute} holder class. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_BackendServiceFailoverPolicy_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_BackendServiceFailoverPolicy_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.BackendServiceFailoverPolicy.class,
            com.google.cloud.compute.v1.BackendServiceFailoverPolicy.Builder.class);
  }

  // Presence bits for the three optional fields; see class javadoc for the bit assignment.
  private int bitField0_;

  // NOTE(review): Compute v1 uses unusually large field numbers (hash-derived by the API
  // generator); they are valid protobuf field numbers, just not the usual small integers.
  public static final int DISABLE_CONNECTION_DRAIN_ON_FAILOVER_FIELD_NUMBER = 182150753;
  private boolean disableConnectionDrainOnFailover_ = false;

  /**
   * Whether {@code disable_connection_drain_on_failover} was explicitly set.
   *
   * <code>optional bool disable_connection_drain_on_failover = 182150753;</code>
   *
   * @return Whether the disableConnectionDrainOnFailover field is set.
   */
  @java.lang.Override
  public boolean hasDisableConnectionDrainOnFailover() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * This can be set to true only if the protocol is TCP. The default is false.
   *
   * <code>optional bool disable_connection_drain_on_failover = 182150753;</code>
   *
   * @return The disableConnectionDrainOnFailover.
   */
  @java.lang.Override
  public boolean getDisableConnectionDrainOnFailover() {
    return disableConnectionDrainOnFailover_;
  }

  public static final int DROP_TRAFFIC_IF_UNHEALTHY_FIELD_NUMBER = 112289428;
  private boolean dropTrafficIfUnhealthy_ = false;

  /**
   * Whether {@code drop_traffic_if_unhealthy} was explicitly set.
   *
   * <code>optional bool drop_traffic_if_unhealthy = 112289428;</code>
   *
   * @return Whether the dropTrafficIfUnhealthy field is set.
   */
  @java.lang.Override
  public boolean hasDropTrafficIfUnhealthy() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   * If set to true, connections to the load balancer are dropped when all primary and all backup
   * backend VMs are unhealthy. If set to false, connections are distributed among all primary VMs
   * when all primary and all backup backend VMs are unhealthy. The default is false.
   *
   * <code>optional bool drop_traffic_if_unhealthy = 112289428;</code>
   *
   * @return The dropTrafficIfUnhealthy.
   */
  @java.lang.Override
  public boolean getDropTrafficIfUnhealthy() {
    return dropTrafficIfUnhealthy_;
  }

  public static final int FAILOVER_RATIO_FIELD_NUMBER = 212667006;
  private float failoverRatio_ = 0F;

  /**
   * Whether {@code failover_ratio} was explicitly set.
   *
   * <code>optional float failover_ratio = 212667006;</code>
   *
   * @return Whether the failoverRatio field is set.
   */
  @java.lang.Override
  public boolean hasFailoverRatio() {
    return ((bitField0_ & 0x00000004) != 0);
  }

  /**
   * The value of the field must be in the range [0, 1]. If the value is 0, the load balancer
   * performs a failover when the number of healthy primary VMs equals zero. For all other values,
   * the load balancer performs a failover when the total number of healthy primary VMs is less
   * than this ratio.
   *
   * <code>optional float failover_ratio = 212667006;</code>
   *
   * @return The failoverRatio.
   */
  @java.lang.Override
  public float getFailoverRatio() {
    return failoverRatio_;
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized. This message has no
  // required fields, so isInitialized() always resolves to true on first call.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are serialized in ascending field-number order, as required for canonical
    // protobuf output (112289428 < 182150753 < 212667006).
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeBool(112289428, dropTrafficIfUnhealthy_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeBool(182150753, disableConnectionDrainOnFailover_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      output.writeFloat(212667006, failoverRatio_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(112289428, dropTrafficIfUnhealthy_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBoolSize(
              182150753, disableConnectionDrainOnFailover_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeFloatSize(212667006, failoverRatio_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.BackendServiceFailoverPolicy)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.BackendServiceFailoverPolicy other =
        (com.google.cloud.compute.v1.BackendServiceFailoverPolicy) obj;

    // Presence must match before values are compared (optional-field semantics).
    if (hasDisableConnectionDrainOnFailover() != other.hasDisableConnectionDrainOnFailover())
      return false;
    if (hasDisableConnectionDrainOnFailover()) {
      if (getDisableConnectionDrainOnFailover() != other.getDisableConnectionDrainOnFailover())
        return false;
    }
    if (hasDropTrafficIfUnhealthy() != other.hasDropTrafficIfUnhealthy()) return false;
    if (hasDropTrafficIfUnhealthy()) {
      if (getDropTrafficIfUnhealthy() != other.getDropTrafficIfUnhealthy()) return false;
    }
    if (hasFailoverRatio() != other.hasFailoverRatio()) return false;
    if (hasFailoverRatio()) {
      // Bit-level float comparison: treats NaN == NaN and distinguishes -0.0f from 0.0f,
      // matching protobuf's canonical equality for float fields.
      if (java.lang.Float.floatToIntBits(getFailoverRatio())
          != java.lang.Float.floatToIntBits(other.getFailoverRatio())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasDisableConnectionDrainOnFailover()) {
      hash = (37 * hash) + DISABLE_CONNECTION_DRAIN_ON_FAILOVER_FIELD_NUMBER;
      hash =
          (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDisableConnectionDrainOnFailover());
    }
    if (hasDropTrafficIfUnhealthy()) {
      hash = (37 * hash) + DROP_TRAFFIC_IF_UNHEALTHY_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDropTrafficIfUnhealthy());
    }
    if (hasFailoverRatio()) {
      hash = (37 * hash) + FAILOVER_RATIO_FIELD_NUMBER;
      hash = (53 * hash) + java.lang.Float.floatToIntBits(getFailoverRatio());
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.compute.v1.BackendServiceFailoverPolicy prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; any other instance seeds the builder
    // with its current field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for {@code google.cloud.compute.v1.BackendServiceFailoverPolicy}.
   *
   * <p>Mirrors the message's three optional fields and their presence bits; see the message
   * javadoc for field semantics.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.BackendServiceFailoverPolicy)
      com.google.cloud.compute.v1.BackendServiceFailoverPolicyOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BackendServiceFailoverPolicy_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BackendServiceFailoverPolicy_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.BackendServiceFailoverPolicy.class,
              com.google.cloud.compute.v1.BackendServiceFailoverPolicy.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.BackendServiceFailoverPolicy.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      disableConnectionDrainOnFailover_ = false;
      dropTrafficIfUnhealthy_ = false;
      failoverRatio_ = 0F;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BackendServiceFailoverPolicy_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.BackendServiceFailoverPolicy getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.BackendServiceFailoverPolicy.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.BackendServiceFailoverPolicy build() {
      com.google.cloud.compute.v1.BackendServiceFailoverPolicy result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.BackendServiceFailoverPolicy buildPartial() {
      com.google.cloud.compute.v1.BackendServiceFailoverPolicy result =
          new com.google.cloud.compute.v1.BackendServiceFailoverPolicy(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields (and their presence bits) from this builder into the result message.
    private void buildPartial0(com.google.cloud.compute.v1.BackendServiceFailoverPolicy result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.disableConnectionDrainOnFailover_ = disableConnectionDrainOnFailover_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.dropTrafficIfUnhealthy_ = dropTrafficIfUnhealthy_;
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.failoverRatio_ = failoverRatio_;
        to_bitField0_ |= 0x00000004;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.BackendServiceFailoverPolicy) {
        return mergeFrom((com.google.cloud.compute.v1.BackendServiceFailoverPolicy) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.compute.v1.BackendServiceFailoverPolicy other) {
      if (other == com.google.cloud.compute.v1.BackendServiceFailoverPolicy.getDefaultInstance())
        return this;
      // Only fields that are set on 'other' overwrite this builder's values.
      if (other.hasDisableConnectionDrainOnFailover()) {
        setDisableConnectionDrainOnFailover(other.getDisableConnectionDrainOnFailover());
      }
      if (other.hasDropTrafficIfUnhealthy()) {
        setDropTrafficIfUnhealthy(other.getDropTrafficIfUnhealthy());
      }
      if (other.hasFailoverRatio()) {
        setFailoverRatio(other.getFailoverRatio());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Case values are precomputed wire tags: (field_number << 3) | wire_type.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 898315424:
              {
                dropTrafficIfUnhealthy_ = input.readBool();
                bitField0_ |= 0x00000002;
                break;
              } // case 898315424
            case 1457206024:
              {
                disableConnectionDrainOnFailover_ = input.readBool();
                bitField0_ |= 0x00000001;
                break;
              } // case 1457206024
            case 1701336053:
              {
                failoverRatio_ = input.readFloat();
                bitField0_ |= 0x00000004;
                break;
              } // case 1701336053
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private boolean disableConnectionDrainOnFailover_;

    /**
     * Whether {@code disable_connection_drain_on_failover} was explicitly set.
     *
     * <code>optional bool disable_connection_drain_on_failover = 182150753;</code>
     *
     * @return Whether the disableConnectionDrainOnFailover field is set.
     */
    @java.lang.Override
    public boolean hasDisableConnectionDrainOnFailover() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     * This can be set to true only if the protocol is TCP. The default is false.
     *
     * <code>optional bool disable_connection_drain_on_failover = 182150753;</code>
     *
     * @return The disableConnectionDrainOnFailover.
     */
    @java.lang.Override
    public boolean getDisableConnectionDrainOnFailover() {
      return disableConnectionDrainOnFailover_;
    }

    /**
     * Sets {@code disable_connection_drain_on_failover} and marks it present.
     *
     * <code>optional bool disable_connection_drain_on_failover = 182150753;</code>
     *
     * @param value The disableConnectionDrainOnFailover to set.
     * @return This builder for chaining.
     */
    public Builder setDisableConnectionDrainOnFailover(boolean value) {
      disableConnectionDrainOnFailover_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Clears {@code disable_connection_drain_on_failover} back to unset/default.
     *
     * <code>optional bool disable_connection_drain_on_failover = 182150753;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDisableConnectionDrainOnFailover() {
      bitField0_ = (bitField0_ & ~0x00000001);
      disableConnectionDrainOnFailover_ = false;
      onChanged();
      return this;
    }

    private boolean dropTrafficIfUnhealthy_;

    /**
     * Whether {@code drop_traffic_if_unhealthy} was explicitly set.
     *
     * <code>optional bool drop_traffic_if_unhealthy = 112289428;</code>
     *
     * @return Whether the dropTrafficIfUnhealthy field is set.
     */
    @java.lang.Override
    public boolean hasDropTrafficIfUnhealthy() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     * If set to true, connections to the load balancer are dropped when all primary and all
     * backup backend VMs are unhealthy. If set to false, connections are distributed among all
     * primary VMs when all primary and all backup backend VMs are unhealthy. The default is
     * false.
     *
     * <code>optional bool drop_traffic_if_unhealthy = 112289428;</code>
     *
     * @return The dropTrafficIfUnhealthy.
     */
    @java.lang.Override
    public boolean getDropTrafficIfUnhealthy() {
      return dropTrafficIfUnhealthy_;
    }

    /**
     * Sets {@code drop_traffic_if_unhealthy} and marks it present.
     *
     * <code>optional bool drop_traffic_if_unhealthy = 112289428;</code>
     *
     * @param value The dropTrafficIfUnhealthy to set.
     * @return This builder for chaining.
     */
    public Builder setDropTrafficIfUnhealthy(boolean value) {
      dropTrafficIfUnhealthy_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Clears {@code drop_traffic_if_unhealthy} back to unset/default.
     *
     * <code>optional bool drop_traffic_if_unhealthy = 112289428;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDropTrafficIfUnhealthy() {
      bitField0_ = (bitField0_ & ~0x00000002);
      dropTrafficIfUnhealthy_ = false;
      onChanged();
      return this;
    }

    private float failoverRatio_;

    /**
     * Whether {@code failover_ratio} was explicitly set.
     *
     * <code>optional float failover_ratio = 212667006;</code>
     *
     * @return Whether the failoverRatio field is set.
     */
    @java.lang.Override
    public boolean hasFailoverRatio() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     * The value of the field must be in the range [0, 1]. If the value is 0, the load balancer
     * performs a failover when the number of healthy primary VMs equals zero. For all other
     * values, the load balancer performs a failover when the total number of healthy primary VMs
     * is less than this ratio.
     *
     * <code>optional float failover_ratio = 212667006;</code>
     *
     * @return The failoverRatio.
     */
    @java.lang.Override
    public float getFailoverRatio() {
      return failoverRatio_;
    }

    /**
     * Sets {@code failover_ratio} and marks it present. Expected range is [0, 1] per the API
     * contract; the value is not validated client-side.
     *
     * <code>optional float failover_ratio = 212667006;</code>
     *
     * @param value The failoverRatio to set.
     * @return This builder for chaining.
     */
    public Builder setFailoverRatio(float value) {
      failoverRatio_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Clears {@code failover_ratio} back to unset/default.
     *
     * <code>optional float failover_ratio = 212667006;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFailoverRatio() {
      bitField0_ = (bitField0_ & ~0x00000004);
      failoverRatio_ = 0F;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.BackendServiceFailoverPolicy)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.BackendServiceFailoverPolicy)
  private static final com.google.cloud.compute.v1.BackendServiceFailoverPolicy DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.BackendServiceFailoverPolicy();
  }

  public static com.google.cloud.compute.v1.BackendServiceFailoverPolicy getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<BackendServiceFailoverPolicy> PARSER =
      new com.google.protobuf.AbstractParser<BackendServiceFailoverPolicy>() {
        @java.lang.Override
        public BackendServiceFailoverPolicy parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<BackendServiceFailoverPolicy> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BackendServiceFailoverPolicy> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.BackendServiceFailoverPolicy getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
35,202
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/ZoneSetLabelsRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
// NOTE(review): this file is protoc output. Any manual change here will be lost on the next
// regeneration; changes belong in compute.proto / the generator, not in this file.

package com.google.cloud.compute.v1;

/**
 *
 *
 * <pre>
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.ZoneSetLabelsRequest}
 */
public final class ZoneSetLabelsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.ZoneSetLabelsRequest)
    ZoneSetLabelsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ZoneSetLabelsRequest.newBuilder() to construct.
  private ZoneSetLabelsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: initializes the optional string field to the empty string so
  // getters never return null even when the field is unset.
  private ZoneSetLabelsRequest() {
    labelFingerprint_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ZoneSetLabelsRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ZoneSetLabelsRequest_descriptor;
  }

  // Reflection hook used by GeneratedMessageV3 for map fields; the only map field in this
  // message is `labels` (field number 500195327).
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 500195327:
        return internalGetLabels();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ZoneSetLabelsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.ZoneSetLabelsRequest.class,
            com.google.cloud.compute.v1.ZoneSetLabelsRequest.Builder.class);
  }

  // Presence bits for `optional` fields. Bit 0x00000001 tracks whether label_fingerprint is set.
  private int bitField0_;

  public static final int LABEL_FINGERPRINT_FIELD_NUMBER = 178124825;

  // Lazily holds either a java.lang.String or a ByteString; whichever representation is
  // requested first is cached back into this field (standard protobuf string caching).
  @SuppressWarnings("serial")
  private volatile java.lang.Object labelFingerprint_ = "";

  /**
   *
   *
   * <pre>
   * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
   * </pre>
   *
   * <code>optional string label_fingerprint = 178124825;</code>
   *
   * @return Whether the labelFingerprint field is set.
   */
  @java.lang.Override
  public boolean hasLabelFingerprint() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
   * </pre>
   *
   * <code>optional string label_fingerprint = 178124825;</code>
   *
   * @return The labelFingerprint.
   */
  @java.lang.Override
  public java.lang.String getLabelFingerprint() {
    java.lang.Object ref = labelFingerprint_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString once and cache the decoded form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      labelFingerprint_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
   * </pre>
   *
   * <code>optional string label_fingerprint = 178124825;</code>
   *
   * @return The bytes for labelFingerprint.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLabelFingerprintBytes() {
    java.lang.Object ref = labelFingerprint_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      labelFingerprint_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int LABELS_FIELD_NUMBER = 500195327;

  // Template map entry (string -> string, both defaulting to "") used to (de)serialize the
  // `labels` map field.
  private static final class LabelsDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.compute.v1.Compute
                .internal_static_google_cloud_compute_v1_ZoneSetLabelsRequest_LabelsEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

  // Returns the backing MapField, substituting a shared empty map when the field was never set.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
    if (labels_ == null) {
      return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
    }
    return labels_;
  }

  public int getLabelsCount() {
    return internalGetLabels().getMap().size();
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public boolean containsLabels(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetLabels().getMap().containsKey(key);
  }

  /** Use {@link #getLabelsMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getLabels() {
    return getLabelsMap();
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
    return internalGetLabels().getMap();
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public /* nullable */ java.lang.String getLabelsOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public java.lang.String getLabelsOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }

  // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 178124825, labelFingerprint_);
    }
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 500195327);
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(178124825, labelFingerprint_);
    }
    // Each map entry is serialized as its own nested MapEntry message.
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetLabels().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
          LabelsDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(500195327, labels__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.ZoneSetLabelsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.ZoneSetLabelsRequest other =
        (com.google.cloud.compute.v1.ZoneSetLabelsRequest) obj;

    // Optional field: presence must match, and values must match only when present.
    if (hasLabelFingerprint() != other.hasLabelFingerprint()) return false;
    if (hasLabelFingerprint()) {
      if (!getLabelFingerprint().equals(other.getLabelFingerprint())) return false;
    }
    if (!internalGetLabels().equals(other.internalGetLabels())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasLabelFingerprint()) {
      hash = (37 * hash) + LABEL_FINGERPRINT_FIELD_NUMBER;
      hash = (53 * hash) + getLabelFingerprint().hashCode();
    }
    if (!internalGetLabels().getMap().isEmpty()) {
      hash = (37 * hash) + LABELS_FIELD_NUMBER;
      hash = (53 * hash) + internalGetLabels().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.compute.v1.ZoneSetLabelsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid the mergeFrom pass when converting the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.ZoneSetLabelsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.ZoneSetLabelsRequest)
      com.google.cloud.compute.v1.ZoneSetLabelsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_ZoneSetLabelsRequest_descriptor;
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
        int number) {
      switch (number) {
        case 500195327:
          return internalGetLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
        int number) {
      switch (number) {
        case 500195327:
          return internalGetMutableLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_ZoneSetLabelsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.ZoneSetLabelsRequest.class,
              com.google.cloud.compute.v1.ZoneSetLabelsRequest.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.ZoneSetLabelsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      labelFingerprint_ = "";
      internalGetMutableLabels().clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_ZoneSetLabelsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.ZoneSetLabelsRequest getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.ZoneSetLabelsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.ZoneSetLabelsRequest build() {
      com.google.cloud.compute.v1.ZoneSetLabelsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.ZoneSetLabelsRequest buildPartial() {
      com.google.cloud.compute.v1.ZoneSetLabelsRequest result =
          new com.google.cloud.compute.v1.ZoneSetLabelsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the result message; builder bit 0x2 (labels) has
    // no presence bit on the message, so only bit 0x1 is propagated to result.bitField0_.
    private void buildPartial0(com.google.cloud.compute.v1.ZoneSetLabelsRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.labelFingerprint_ = labelFingerprint_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.labels_ = internalGetLabels();
        result.labels_.makeImmutable();
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.ZoneSetLabelsRequest) {
        return mergeFrom((com.google.cloud.compute.v1.ZoneSetLabelsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.compute.v1.ZoneSetLabelsRequest other) {
      if (other == com.google.cloud.compute.v1.ZoneSetLabelsRequest.getDefaultInstance())
        return this;
      if (other.hasLabelFingerprint()) {
        labelFingerprint_ = other.labelFingerprint_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      internalGetMutableLabels().mergeFrom(other.internalGetLabels());
      bitField0_ |= 0x00000002;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Tag 1424998602 == (178124825 << 3) | 2: label_fingerprint, length-delimited.
            case 1424998602:
              {
                labelFingerprint_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 1424998602
            // Tag -293404678 is (500195327 << 3) | 2 wrapped to a negative int32: one
            // labels map entry, length-delimited.
            case -293404678:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
                    input.readMessage(
                        LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableLabels()
                    .getMutableMap()
                    .put(labels__.getKey(), labels__.getValue());
                bitField0_ |= 0x00000002;
                break;
              } // case -293404678
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder-side presence bits: 0x1 = label_fingerprint set, 0x2 = labels touched.
    private int bitField0_;

    private java.lang.Object labelFingerprint_ = "";

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return Whether the labelFingerprint field is set.
     */
    public boolean hasLabelFingerprint() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return The labelFingerprint.
     */
    public java.lang.String getLabelFingerprint() {
      java.lang.Object ref = labelFingerprint_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        labelFingerprint_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return The bytes for labelFingerprint.
     */
    public com.google.protobuf.ByteString getLabelFingerprintBytes() {
      java.lang.Object ref = labelFingerprint_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        labelFingerprint_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @param value The labelFingerprint to set.
     * @return This builder for chaining.
     */
    public Builder setLabelFingerprint(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      labelFingerprint_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearLabelFingerprint() {
      labelFingerprint_ = getDefaultInstance().getLabelFingerprint();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @param value The bytes for labelFingerprint to set.
     * @return This builder for chaining.
     */
    public Builder setLabelFingerprintBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      labelFingerprint_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

    private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
      if (labels_ == null) {
        return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      return labels_;
    }

    // Returns a mutable view of the labels map, copying first if the current MapField is a
    // shared/immutable instance (copy-on-write).
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableLabels() {
      if (labels_ == null) {
        labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      if (!labels_.isMutable()) {
        labels_ = labels_.copy();
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return labels_;
    }

    public int getLabelsCount() {
      return internalGetLabels().getMap().size();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public boolean containsLabels(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetLabels().getMap().containsKey(key);
    }

    /** Use {@link #getLabelsMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getLabels() {
      return getLabelsMap();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
      return internalGetLabels().getMap();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public /* nullable */ java.lang.String getLabelsOrDefault(
        java.lang.String key,
        /* nullable */
        java.lang.String defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public java.lang.String getLabelsOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }

    public Builder clearLabels() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableLabels().getMutableMap().clear();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    public Builder removeLabels(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableLabels().getMutableMap().remove(key);
      return this;
    }

    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
      bitField0_ |= 0x00000002;
      return internalGetMutableLabels().getMutableMap();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    public Builder putLabels(java.lang.String key, java.lang.String value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableLabels().getMutableMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
      internalGetMutableLabels().getMutableMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.ZoneSetLabelsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.ZoneSetLabelsRequest)
  private static final com.google.cloud.compute.v1.ZoneSetLabelsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.ZoneSetLabelsRequest();
  }

  public static com.google.cloud.compute.v1.ZoneSetLabelsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that builds via the Builder's streaming mergeFrom; on any failure it attaches the
  // partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ZoneSetLabelsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ZoneSetLabelsRequest>() {
        @java.lang.Override
        public ZoneSetLabelsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ZoneSetLabelsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ZoneSetLabelsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.ZoneSetLabelsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/phoenix
34,649
phoenix-core/src/it/java/org/apache/phoenix/schema/tool/SchemaToolExtractionIT.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.schema.tool; import static junit.framework.TestCase.assertTrue; import static junit.framework.TestCase.fail; import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import org.apache.phoenix.end2end.ParallelStatsEnabledIT; import org.apache.phoenix.end2end.ParallelStatsEnabledTest; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.parse.ParseException; import org.apache.phoenix.parse.SQLParser; import org.apache.phoenix.schema.PTable; import org.apache.phoenix.util.PhoenixRuntime; import org.apache.phoenix.util.PropertiesUtil; import org.apache.phoenix.util.ReadOnlyProps; import org.apache.phoenix.util.SchemaUtil; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(ParallelStatsEnabledTest.class) public class 
SchemaToolExtractionIT extends ParallelStatsEnabledIT {

  // End-to-end coverage for SchemaTool in EXTRACT mode: each test executes DDL
  // (tables, views, indexes) against a mini-cluster, then asserts that the tool
  // regenerates an equivalent CREATE statement from SYSTEM.CATALOG.

  @BeforeClass
  public static synchronized void setup() throws Exception {
    Map<String, String> props = Collections.emptyMap();
    setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
  }

  @Test
  public void testCreateTableStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(K VARCHAR NOT NULL PRIMARY KEY, "
      + "V1 VARCHAR, V2 VARCHAR) TTL=2592000, IMMUTABLE_ROWS=TRUE, DISABLE_WAL=TRUE";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertEquals(createTableStmt, result.toUpperCase());
  }

  @Test
  public void testCreateTableStatementLowerCase() throws Exception {
    String tableName = "lowecasetbl1";
    String schemaName = "lowecaseschemaname1";
    String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(\"smallK\" VARCHAR NOT NULL PRIMARY KEY, "
      + "\"asd\".V1 VARCHAR, \"foo\".\"bar\" VARCHAR) TTL=2592000, IMMUTABLE_ROWS=true, DISABLE_WAL=true";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    // Quoted identifiers must survive extraction with their original case.
    String result =
      runSchemaExtractionTool("\"" + schemaName + "\"", "\"" + tableName + "\"", null, queries);
    Assert.assertEquals(createTableStmt, result);
  }

  @Test
  public void testCreateIndexStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String indexName1 = generateUniqueName();
    String indexName2 = generateUniqueName();
    String indexName3 = generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE " + pTableFullName
      + "(k VARCHAR NOT NULL PRIMARY KEY, \"v1\" VARCHAR, v2 VARCHAR)" + properties;
    String createIndexStatement =
      "CREATE INDEX " + indexName + " ON " + pTableFullName + "(\"v1\" DESC) INCLUDE (v2)";
    String createIndexStatement1 =
      "CREATE INDEX " + indexName1 + " ON " + pTableFullName + "(v2 DESC) INCLUDE (\"v1\")";
    String createIndexStatement2 = "CREATE INDEX " + indexName2 + " ON " + pTableFullName + "(k)";
    String createIndexStatement3 = "CREATE INDEX " + indexName3 + " ON " + pTableFullName
      + "('QUOTED' || \"v1\" || V2 DESC, \"v1\" DESC, K) INCLUDE (V2)";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    queries.add(createIndexStatement1);
    queries.add(createIndexStatement2);
    String result = runSchemaExtractionTool(schemaName, indexName2, null, queries);
    Assert.assertEquals(createIndexStatement2.toUpperCase(), result.toUpperCase());
    // createIndexStatement and createIndexStatement1 were previously created but
    // never verified (see old FIXMEs). The exact round-trip text depends on
    // identifier quoting/case normalization, so at minimum assert that the tool
    // emits a parseable CREATE INDEX statement for each of them.
    String resultIdx = runSchemaExtractionTool(schemaName, indexName, null, new ArrayList<>());
    Assert.assertTrue(resultIdx.toUpperCase().startsWith("CREATE INDEX"));
    new SQLParser(resultIdx).parseStatement();
    String resultIdx1 = runSchemaExtractionTool(schemaName, indexName1, null, new ArrayList<>());
    Assert.assertTrue(resultIdx1.toUpperCase().startsWith("CREATE INDEX"));
    new SQLParser(resultIdx1).parseStatement();
    List<String> queries3 = new ArrayList<>();
    queries3.add(createIndexStatement3);
    String result3 = runSchemaExtractionTool(schemaName, indexName3, null, queries3);
    // Expression index with quoted columns must round-trip verbatim.
    Assert.assertEquals(createIndexStatement3, result3);
  }

  @Test
  public void testDDLsWithDefaults() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String properties = "COLUMN_ENCODED_BYTES=4";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String pIndexFullName = SchemaUtil.getQualifiedTableName(schemaName, indexName);
    String createTableStatement = "CREATE TABLE " + pTableFullName
      + "(k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)";
    String createIndexStatement = "CREATE INDEX " + indexName + " ON " + pTableFullName
      + "(v1 DESC) INCLUDE (v2)" + properties;
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    try (PhoenixConnection conn =
        DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class)) {
      executeCreateStatements(conn, queries);
      PTable pData = conn.getTable(pTableFullName);
      PTable pIndex = conn.getTable(pIndexFullName);
      // The trailing boolean appears to request that default (unset) properties
      // be spelled out in the generated DDL — TODO confirm against
      // SchemaExtractionProcessor's constructor contract.
      SchemaExtractionProcessor schemaExtractionProcessor =
        new SchemaExtractionProcessor(null, config, pData, true);
      String tableDDL = schemaExtractionProcessor.process();
      assertTrue(tableDDL.contains("IMMUTABLE_STORAGE_SCHEME"));
      SchemaExtractionProcessor schemaExtractionProcessorIndex =
        new SchemaExtractionProcessor(null, config, pIndex, true);
      String indexDDL = schemaExtractionProcessorIndex.process();
      assertTrue(indexDDL.contains("IMMUTABLE_STORAGE_SCHEME"));
      assertTrue(indexDDL.contains("ENCODING_SCHEME='FOUR_BYTE_QUALIFIERS'"));
    }
  }

  @Test
  public void testCreateLocalIndexStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String indexName2 = generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE " + pTableFullName
      + "(k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)" + properties;
    String createIndexStatement =
      "CREATE LOCAL INDEX " + indexName + " ON " + pTableFullName + "(v1 DESC, k) INCLUDE (v2)";
    String createIndexStatement2 = "CREATE LOCAL INDEX " + indexName2 + " ON " + pTableFullName
      + "( LPAD(v1,10) DESC, k) INCLUDE (v2)";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    Assert.assertEquals(createIndexStatement.toUpperCase(), result.toUpperCase());
    List<String> queries2 = new ArrayList<>();
    queries2.add(createIndexStatement2);
    String result2 = runSchemaExtractionTool(schemaName, indexName2, null, queries2);
    Assert.assertEquals(createIndexStatement2.toUpperCase(), result2.toUpperCase());
  }

  @Test
  public void testCreateLocalIndexStatementLowerCase() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE " + pTableFullName
      + "(K VARCHAR NOT NULL PRIMARY KEY, \"v1\" VARCHAR, V2 VARCHAR)" + properties;
    String createIndexStatement = "CREATE LOCAL INDEX " + indexName + " ON " + pTableFullName
      + "( LPAD(\"v1\",10) DESC, K) INCLUDE (V2)";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    Assert.assertEquals(createIndexStatement, result);
  }

  @Test
  public void testCreateIndexStatementLowerCase() throws Exception {
    String tableName = "lowercase" + generateUniqueName();
    String schemaName = "lowercase" + generateUniqueName();
    String indexName = "\"lowercaseIND" + generateUniqueName() + "\"";
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE " + pTableFullName
      + "(\"k\" VARCHAR NOT NULL PRIMARY KEY, \"a\".V1 VARCHAR, \"v2\" VARCHAR)" + properties;
    String createIndexStatement = "CREATE INDEX " + indexName + " ON " + pTableFullName
      + "(\"a\".V1 DESC, \"k\") INCLUDE (\"v2\")";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool("\"" + schemaName + "\"", indexName, null, queries);
    Assert.assertEquals(createIndexStatement, result);
  }

  @Test
  public void testCreateIndexStatementLowerCaseCombined() throws Exception {
    String tableName = "lowercase" + generateUniqueName();
    String schemaName = "lowercase" + generateUniqueName();
    String indexName = "\"lowercaseIND" + generateUniqueName() + "\"";
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE " + pTableFullName
      + "(ID varchar primary key, \"number\" integer, \"currency\" decimal(6,2), lista varchar[])"
      + properties;
    // Expression index mixing quoted and unquoted identifiers.
    String createIndexStatement = "CREATE INDEX " + indexName + " ON " + pTableFullName
      + "(\"number\" * \"currency\", ID) INCLUDE (LISTA)";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool("\"" + schemaName + "\"", indexName, null, queries);
    Assert.assertEquals(createIndexStatement, result);
  }

  @Test
  public void testCreateViewStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String viewName = generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(k BIGINT NOT NULL PRIMARY KEY, " + "v1 VARCHAR, v2 VARCHAR)" + properties;
    String viewFullName = SchemaUtil.getQualifiedTableName(schemaName, viewName);
    String createView = "CREATE VIEW " + viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
      + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
      + "AS SELECT * FROM " + pTableFullName;
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    String result = runSchemaExtractionTool(schemaName, viewName, null, queries);
    Assert.assertEquals(createView.toUpperCase(), result.toUpperCase());
  }

  @Test
  public void testCreateViewStatementLowerCase() throws Exception {
    String tableName = "lowercase" + generateUniqueName();
    String schemaName = "lowercase" + generateUniqueName();
    String viewName = "lowercase" + generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(\"k\" BIGINT NOT NULL PRIMARY KEY, " + "\"a\".V1 VARCHAR, v2 VARCHAR)" + properties;
    String viewFullName = SchemaUtil.getEscapedTableName(schemaName, viewName);
    String createView = "CREATE VIEW " + viewFullName + "(ID1 BIGINT, \"id2\" BIGINT NOT NULL, "
      + "ID3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (\"id2\", ID3 DESC)) "
      + "AS SELECT * FROM " + pTableFullName + " WHERE \"k\" > 3";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    String result =
      runSchemaExtractionTool("\"" + schemaName + "\"", "\"" + viewName + "\"", null, queries);
    Assert.assertEquals(createView, result);
  }

  @Test
  public void testCreateViewStatement_customName() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    // View name containing characters that require quoting in generated DDL.
    String viewName = generateUniqueName() + "@@";
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(k BIGINT NOT NULL PRIMARY KEY, " + "v1 VARCHAR, v2 VARCHAR)" + properties;
    String viewFullName = SchemaUtil.getPTableFullNameWithQuotes(schemaName, viewName);
    String createView = "CREATE VIEW " + viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
      + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
      + "AS SELECT * FROM " + pTableFullName;
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    String result = runSchemaExtractionTool(schemaName, viewName, null, queries);
    Assert.assertEquals(createView.toUpperCase(), result.toUpperCase());
  }

  @Test
  public void testCreateViewIndexStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String viewName = generateUniqueName();
    String childView = generateUniqueName();
    String indexName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(k BIGINT NOT NULL PRIMARY KEY, " + "v1 VARCHAR, v2 VARCHAR)";
    String viewFullName = SchemaUtil.getQualifiedTableName(schemaName, viewName);
    String childviewName = SchemaUtil.getQualifiedTableName(schemaName, childView);
    String createView = "CREATE VIEW " + viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
      + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
      + "AS SELECT * FROM " + pTableFullName;
    String createView1 = "CREATE VIEW " + childviewName + " AS SELECT * FROM " + viewFullName;
    String createIndexStatement =
      "CREATE INDEX " + indexName + " ON " + childviewName + "(id2, id1) INCLUDE (v1)";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    queries.add(createView1);
    queries.add(createIndexStatement);
    // Extraction is expected to append the inherited PK columns (K, ID3 DESC).
    String expected =
      "CREATE INDEX %s ON " + childviewName + "(ID2, ID1, K, ID3 DESC) INCLUDE (V1)";
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    Assert.assertEquals(String.format(expected, indexName).toUpperCase(), result.toUpperCase());
    // Re-create from the extracted DDL and check the column ordinals match.
    queries.clear();
    String newIndex = indexName + "_NEW";
    queries.add(String.format(expected, newIndex));
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
      executeCreateStatements(conn, queries);
    }
    compareOrdinalPositions(indexName, newIndex);
  }

  /**
   * Asserts that every column of {@code newTable} recorded in SYSTEM.CATALOG has
   * the same ORDINAL_POSITION as the identically named column of {@code table}.
   */
  private void compareOrdinalPositions(String table, String newTable) throws SQLException {
    String ordinalQuery = "SELECT COLUMN_NAME, " + "ORDINAL_POSITION FROM SYSTEM.CATALOG"
      + " WHERE TABLE_NAME='%s' AND ORDINAL_POSITION IS NOT NULL ORDER BY COLUMN_NAME";
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    Map<String, Integer> ordinalMap = new HashMap<>();
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
      ResultSet rs = conn.createStatement().executeQuery(String.format(ordinalQuery, table));
      while (rs.next()) {
        ordinalMap.put(rs.getString(1), rs.getInt(2));
      }
      rs = conn.createStatement().executeQuery(String.format(ordinalQuery, newTable));
      while (rs.next()) {
        Assert.assertEquals(ordinalMap.get(rs.getString(1)).intValue(), rs.getInt(2));
      }
    }
  }

  @Test
  public void testCreateViewStatement_tenant() throws Exception {
    String tableName = generateUniqueName();
    String viewName = generateUniqueName();
    String schemaName = generateUniqueName();
    String tenantId = "abc";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(k BIGINT NOT NULL PRIMARY KEY, " + "v1 VARCHAR, v2 VARCHAR)";
    String viewFullName = SchemaUtil.getPTableFullNameWithQuotes(schemaName, viewName);
    String createViewStmt = "CREATE VIEW " + viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
      + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
      + "AS SELECT * FROM " + pTableFullName;
    // Base table is created through a global connection, the view through a
    // tenant-specific one.
    List<String> queries1 = new ArrayList<>();
    queries1.add(createTableStmt);
    runSchemaExtractionTool(schemaName, tableName, null, queries1);
    List<String> queries2 = new ArrayList<>();
    queries2.add(createViewStmt);
    String result2 = runSchemaExtractionTool(schemaName, viewName, tenantId, queries2);
    Assert.assertEquals(createViewStmt.toUpperCase(), result2.toUpperCase());
  }

  @Test
  public void testSaltedTableStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName
      + "(a_integer integer not null CONSTRAINT pk PRIMARY KEY (a_integer)) SALT_BUCKETS=16";
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertTrue(getProperties(result).contains("SALT_BUCKETS=16"));
  }

  @Test
  public void testCreateTableWithPKConstraint() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName + "(a_char CHAR(15) NOT NULL, "
      + "b_char CHAR(15) NOT NULL, "
      + "c_bigint BIGINT NOT NULL CONSTRAINT PK PRIMARY KEY (a_char, b_char, c_bigint)) IMMUTABLE_ROWS=TRUE";
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertEquals(query.toUpperCase(), result.toUpperCase());
  }

  @Test
  public void testCreateTableWithArrayColumn() throws Exception {
    String tableName = generateUniqueName();
    String pTableFullName = tableName; // no schema — exercises the empty-schema path
    String query = "create table " + pTableFullName + "(a_char CHAR(15) NOT NULL, "
      + "b_char CHAR(10) NOT NULL, " + "c_var_array VARCHAR ARRAY, "
      + "d_char_array CHAR(15) ARRAY[3] " + "CONSTRAINT PK PRIMARY KEY (a_char, b_char)) "
      + "TTL=2592000, IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', REPLICATION_SCOPE=1";
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool("", tableName, null, queries);
    Assert.assertEquals(query.toUpperCase(), result.toUpperCase());
  }

  @Test
  public void testCreateTableWithDefaultCFProperties() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String properties =
      "KEEP_DELETED_CELLS=TRUE, TTL=1209600, IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', "
        + "REPLICATION_SCOPE=1, DEFAULT_COLUMN_FAMILY='cv', SALT_BUCKETS=16, MULTI_TENANT=true, TIME_TEST='72HOURS'";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName + "(a_char CHAR(15) NOT NULL, "
      + "b_char CHAR(10) NOT NULL, " + "\"av\".\"_\" CHAR(1), " + "\"bv\".\"_\" CHAR(1), "
      + "\"cv\".\"_\" CHAR(1), "
      + "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    // Property lists can be reordered by extraction; compare as sets.
    Assert.assertTrue(compareProperties(properties, getProperties(result)));
  }

  @Test
  public void testCreateTableWithCFProperties() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String properties = "\"av\".VERSIONS=2, \"bv\".VERSIONS=2, " + "DATA_BLOCK_ENCODING='DIFF', "
      + "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', SALT_BUCKETS=16, MULTI_TENANT=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName + "(a_char CHAR(15) NOT NULL, "
      + "b_char CHAR(10) NOT NULL, " + "\"av\".\"_\" CHAR(1), " + "\"bv\".\"_\" CHAR(1), "
      + "\"cv\".\"_\" CHAR(1) " + "CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertTrue(compareProperties(properties, getProperties(result)));
  }

  @Test
  public void testCreateTableWithMultipleCF() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String properties = "\"av\".VERSIONS=2, \"bv\".VERSIONS=3, "
      + "\"cv\".VERSIONS=4, DATA_BLOCK_ENCODING='DIFF', "
      + "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', SALT_BUCKETS=16, MULTI_TENANT=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    final String query = "create table " + pTableFullName + "(a_char CHAR(15) NOT NULL, "
      + "b_char CHAR(10) NOT NULL, " + "\"av\".\"_\" CHAR(1), " + "\"bv\".\"_\" CHAR(1), "
      + "\"cv\".\"_\" CHAR(1), "
      + "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertTrue(compareProperties(properties, getProperties(result)));
  }

  @Test
  public void testCreateTableWithMultipleCFProperties() throws Exception {
    String tableName = "07" + generateUniqueName();
    String schemaName = generateUniqueName();
    String properties =
      "\"av\".DATA_BLOCK_ENCODING='DIFF', \"bv\".DATA_BLOCK_ENCODING='DIFF', "
        + "\"cv\".DATA_BLOCK_ENCODING='DIFF', " + "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', "
        + "SALT_BUCKETS=16, MULTI_TENANT=true, BLOOMFITER='ROW'";
    // When every CF carries the same value, extraction collapses the per-CF
    // properties into a single table-level property.
    String simplifiedProperties = "DATA_BLOCK_ENCODING='DIFF', "
      + "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', "
      + "SALT_BUCKETS=16, MULTI_TENANT=true, BLOOMFITER='ROW'";
    String query = "create table " + schemaName + ".\"" + tableName + "\""
      + "(a_char CHAR(15) NOT NULL, " + "b_char CHAR(10) NOT NULL, " + "\"av\".\"_\" CHAR(1), "
      + "\"bv\".\"_\" CHAR(1), "
      + "\"cv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    try {
      new SQLParser(result).parseStatement();
    } catch (ParseException pe) {
      fail("This should not happen!");
    }
    Assert.assertTrue(compareProperties(simplifiedProperties, getProperties(result)));
  }

  @Test
  public void testColumnAndPKOrdering() throws Exception {
    String table = "CREATE TABLE IF NOT EXISTS MY_SCHEMA.MY_DATA_TABLE (\n"
      + " ORGANIZATION_ID CHAR(15) NOT NULL, \n" + " KEY_PREFIX CHAR(3) NOT NULL,\n"
      + " CREATED_DATE DATE,\n" + " CREATED_BY CHAR(15) \n" + " CONSTRAINT PK PRIMARY KEY (\n"
      + " ORGANIZATION_ID, \n" + " KEY_PREFIX\n" + " )\n"
      + ") VERSIONS=1, IMMUTABLE_ROWS=true, MULTI_TENANT=true, REPLICATION_SCOPE=1";
    String view = "CREATE VIEW IF NOT EXISTS MY_SCHEMA.MY_DATA_VIEW (\n"
      + " DATE_TIME1 DATE NOT NULL,\n" + " TEXT1 VARCHAR NOT NULL,\n"
      + " INT1 BIGINT NOT NULL,\n" + " DOUBLE1 DECIMAL(12, 3),\n" + " DOUBLE2 DECIMAL(12, 3),\n"
      + " DOUBLE3 DECIMAL(12, 3),\n" + " CONSTRAINT PKVIEW PRIMARY KEY\n" + " (\n"
      + " DATE_TIME1, TEXT1, INT1\n" + " )\n" + ")\n"
      + "AS SELECT * FROM MY_SCHEMA.MY_DATA_TABLE WHERE KEY_PREFIX = '9Yj'";
    String index = "CREATE INDEX IF NOT EXISTS MY_VIEW_INDEX\n"
      + "ON MY_SCHEMA.MY_DATA_VIEW (TEXT1, DATE_TIME1 DESC, DOUBLE1)\n"
      + "INCLUDE (CREATED_BY, CREATED_DATE)";
    List<String> queries = new ArrayList<>();
    queries.add(table);
    queries.add(view);
    queries.add(index);
    // INT1 (remaining view PK column) is appended to the extracted index PK.
    String expectedIndex = "CREATE INDEX MY_VIEW_INDEX "
      + "ON MY_SCHEMA.MY_DATA_VIEW(TEXT1, DATE_TIME1 DESC, DOUBLE1, INT1)"
      + " INCLUDE (CREATED_BY, CREATED_DATE)";
    String result = runSchemaExtractionTool("MY_SCHEMA", "MY_VIEW_INDEX", null, queries);
    Assert.assertEquals(expectedIndex.toUpperCase(), result.toUpperCase());
    String expectedView = "CREATE VIEW MY_SCHEMA.MY_DATA_VIEW(DATE_TIME1 DATE NOT NULL, "
      + "TEXT1 VARCHAR NOT NULL, INT1 BIGINT NOT NULL, DOUBLE1 DECIMAL(12,3), "
      + "DOUBLE2 DECIMAL(12,3), DOUBLE3 DECIMAL(12,3)"
      + " CONSTRAINT PKVIEW PRIMARY KEY (DATE_TIME1, TEXT1, INT1))"
      + " AS SELECT * FROM MY_SCHEMA.MY_DATA_TABLE WHERE KEY_PREFIX = '9YJ'";
    result = runSchemaExtractionTool("MY_SCHEMA", "MY_DATA_VIEW", null, new ArrayList<String>());
    Assert.assertEquals(expectedView.toUpperCase(), result.toUpperCase());
  }

  @Test
  public void testColumnAndPKOrdering_nonView() throws Exception {
    String indexName = "MY_DATA_TABLE_INDEX";
    String table = "CREATE TABLE MY_SCHEMA.MY_SAMPLE_DATA_TABLE("
      + "ORGANIZATION_ID CHAR(15) NOT NULL," + " SOME_ID_COLUMN CHAR(3) NOT NULL,"
      + " SOME_ID_COLUMN_2 CHAR(15) NOT NULL," + " CREATED_DATE DATE NOT NULL,"
      + " SOME_ID_COLUMN_3 CHAR(15) NOT NULL," + " SOME_ID_COLUMN_4 CHAR(15),"
      + " CREATED_BY_ID VARCHAR," + " VALUE_FIELD VARCHAR"
      + " CONSTRAINT PK PRIMARY KEY (ORGANIZATION_ID, SOME_ID_COLUMN, SOME_ID_COLUMN_2,"
      + " CREATED_DATE DESC, SOME_ID_COLUMN_3))"
      + " IMMUTABLE_ROWS=true, IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN',"
      + " MULTI_TENANT=true, REPLICATION_SCOPE=1\n";
    String index = "CREATE INDEX IF NOT EXISTS MY_DATA_TABLE_INDEX\n"
      + " ON MY_SCHEMA.MY_SAMPLE_DATA_TABLE (SOME_ID_COLUMN, CREATED_DATE DESC,"
      + " SOME_ID_COLUMN_2, SOME_ID_COLUMN_3)\n" + " INCLUDE\n"
      + "(SOME_ID_COLUMN_4, CREATED_BY_ID, VALUE_FIELD)\n";
    List<String> queries = new ArrayList<>();
    queries.add(table);
    queries.add(index);
    String result = runSchemaExtractionTool("MY_SCHEMA", "MY_DATA_TABLE_INDEX", null, queries);
    String expected = "CREATE INDEX %s ON MY_SCHEMA.MY_SAMPLE_DATA_TABLE"
      + "(SOME_ID_COLUMN, CREATED_DATE DESC, SOME_ID_COLUMN_2, SOME_ID_COLUMN_3) "
      + "INCLUDE (SOME_ID_COLUMN_4, CREATED_BY_ID, VALUE_FIELD)";
    Assert.assertEquals(String.format(expected, indexName).toUpperCase(), result.toUpperCase());
    // Re-create from the extracted DDL and check the column ordinals match.
    queries.clear();
    String newIndex = indexName + "_NEW";
    queries.add(String.format(expected, newIndex));
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
      executeCreateStatements(conn, queries);
    }
    compareOrdinalPositions(indexName, newIndex);
  }

  @Test
  public void testCreateIndexStatementWithColumnFamily() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE " + pTableFullName
      + "(k VARCHAR NOT NULL PRIMARY KEY, " + "\"av\".\"_\" CHAR(1), v2 VARCHAR)";
    String createIndexStmt =
      "CREATE INDEX " + indexName + " ON " + pTableFullName + "(\"av\".\"_\")";
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createIndexStmt);
    // by the principle of having maximal columns in pk
    String expected = "CREATE INDEX %s ON " + pTableFullName + "(\"av\".\"_\", K)";
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    Assert.assertEquals(String.format(expected, indexName).toUpperCase(), result.toUpperCase());
    queries.clear();
    String newIndex = indexName + "_NEW";
    queries.add(String.format(expected, newIndex));
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
      executeCreateStatements(conn, queries);
    }
    compareOrdinalPositions(indexName, newIndex);
  }

  @Test
  public void testSaltingWithOOOPKDefinitions() throws Exception {
    String schemaName = generateUniqueName();
    String tableName = generateUniqueName();
    String fullTableName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    // Order of definition of columns in create table is different from PK constraint
    String ddl = "CREATE TABLE IF NOT EXISTS " + fullTableName + "(ID1 CHAR(15) NOT NULL,\n"
      + "ID2 INTEGER NOT NULL,\n" + "TEXT VARCHAR,\n" + "INT INTEGER,\n"
      + "DOUBLE DECIMAL(12,3),\n" + "CREATED_DATE DATE NOT NULL,\n" + "TS TIMESTAMP\n"
      + "CONSTRAINT PK PRIMARY KEY (ID1, ID2, CREATED_DATE))\n"
      + "SALT_BUCKETS=16,MULTI_TENANT=true";
    List<String> queries = new ArrayList<>();
    queries.add(ddl);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    String expected =
      "CREATE TABLE %s(ID1 CHAR(15) NOT NULL, ID2 INTEGER NOT NULL, TEXT VARCHAR, "
        + "INT INTEGER, DOUBLE DECIMAL(12,3), CREATED_DATE DATE NOT NULL, TS TIMESTAMP "
        + "CONSTRAINT PK PRIMARY KEY (ID1, ID2, CREATED_DATE)) "
        + "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', SALT_BUCKETS=16, MULTI_TENANT=true";
    Assert.assertEquals(String.format(expected, fullTableName), result);
  }

  /** Opens a connection scoped to the given tenant. */
  private Connection getTenantConnection(String url, String tenantId) throws SQLException {
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
    return DriverManager.getConnection(url, props);
  }

  /**
   * Executes {@code queries} (global or tenant connection depending on
   * {@code tenantId}), then runs SchemaTool in EXTRACT mode for the given
   * schema/table and returns the extracted DDL.
   */
  private String runSchemaExtractionTool(String schemaName, String tableName, String tenantId,
      List<String> queries) throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    String output;
    if (tenantId == null) {
      try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
        executeCreateStatements(conn, queries);
        String[] args = { "-m", "EXTRACT", "-tb", tableName, "-s", schemaName };
        output = runSchemaTool(conn, args);
      }
    } else {
      try (Connection conn = getTenantConnection(getUrl(), tenantId)) {
        executeCreateStatements(conn, queries);
        String[] args = { "-m", "EXTRACT", "-tb", tableName, "-s", schemaName, "-t", tenantId };
        output = runSchemaTool(conn, args);
      }
    }
    return output;
  }

  private void executeCreateStatements(Connection conn, List<String> queries) throws SQLException {
    for (String query : queries) {
      conn.createStatement().execute(query);
    }
    conn.commit();
  }

  /**
   * Runs SchemaTool with the given CLI arguments, borrowing the cluster
   * configuration from {@code conn} when present.
   *
   * @throws RuntimeException if the tool exits with a non-zero status
   */
  public static String runSchemaTool(Connection conn, String[] args) throws Exception {
    SchemaTool set = new SchemaTool();
    if (conn != null) {
      set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
    }
    int ret = set.run(args);
    if (ret != 0) {
      throw new RuntimeException(String.format("Schema tool failed with error %d", ret));
    }
    return set.getOutput();
  }

  /** Returns the property-list tail of a CREATE statement (text after the last ')'). */
  private String getProperties(String query) {
    return query.substring(query.lastIndexOf(")") + 1);
  }

  /** Order-, case- and whitespace-insensitive comparison of comma-separated property lists. */
  private boolean compareProperties(String prop1, String prop2) {
    String[] propArray1 = prop1.toUpperCase().replaceAll("\\s+", "").split(",");
    String[] propArray2 = prop2.toUpperCase().replaceAll("\\s+", "").split(",");
    Set<String> set1 = new HashSet<>(Arrays.asList(propArray1));
    Set<String> set2 = new HashSet<>(Arrays.asList(propArray2));
    return set1.equals(set2);
  }
}
googleapis/google-cloud-java
35,136
java-grafeas/src/main/java/io/grafeas/v1/Artifact.java
/* * Copyright 2025 The Grafeas Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: grafeas/v1/provenance.proto // Protobuf Java Version: 3.25.8 package io.grafeas.v1; /** * * * <pre> * Artifact describes a build product. * </pre> * * Protobuf type {@code grafeas.v1.Artifact} */ public final class Artifact extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grafeas.v1.Artifact) ArtifactOrBuilder { private static final long serialVersionUID = 0L; // Use Artifact.newBuilder() to construct. 
private Artifact(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Artifact() { checksum_ = ""; id_ = ""; names_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Artifact(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_Artifact_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_Artifact_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1.Artifact.class, io.grafeas.v1.Artifact.Builder.class); } public static final int CHECKSUM_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object checksum_ = ""; /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The checksum. */ @java.lang.Override public java.lang.String getChecksum() { java.lang.Object ref = checksum_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); checksum_ = s; return s; } } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The bytes for checksum. 
*/ @java.lang.Override public com.google.protobuf.ByteString getChecksumBytes() { java.lang.Object ref = checksum_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); checksum_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object id_ = ""; /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The id. */ @java.lang.Override public java.lang.String getId() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The bytes for id. */ @java.lang.Override public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NAMES_FIELD_NUMBER = 3; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList names_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. 
Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return A list containing the names. */ public com.google.protobuf.ProtocolStringList getNamesList() { return names_; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return The count of names. */ public int getNamesCount() { return names_.size(); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the element to return. * @return The names at the given index. */ public java.lang.String getNames(int index) { return names_.get(index); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the value to return. * @return The bytes of the names at the given index. 
*/ public com.google.protobuf.ByteString getNamesBytes(int index) { return names_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(checksum_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, checksum_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, id_); } for (int i = 0; i < names_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, names_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(checksum_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, checksum_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, id_); } { int dataSize = 0; for (int i = 0; i < names_.size(); i++) { dataSize += computeStringSizeNoTag(names_.getRaw(i)); } size += dataSize; size += 1 * getNamesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grafeas.v1.Artifact)) { return super.equals(obj); } io.grafeas.v1.Artifact other = (io.grafeas.v1.Artifact) obj; if (!getChecksum().equals(other.getChecksum())) return false; if (!getId().equals(other.getId())) return false; if 
(!getNamesList().equals(other.getNamesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CHECKSUM_FIELD_NUMBER; hash = (53 * hash) + getChecksum().hashCode(); hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + getId().hashCode(); if (getNamesCount() > 0) { hash = (37 * hash) + NAMES_FIELD_NUMBER; hash = (53 * hash) + getNamesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static io.grafeas.v1.Artifact parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.Artifact parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.Artifact parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.Artifact parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.Artifact parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.Artifact parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.Artifact parseFrom(java.io.InputStream 
input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1.Artifact parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1.Artifact parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static io.grafeas.v1.Artifact parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1.Artifact parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1.Artifact parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grafeas.v1.Artifact prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Artifact describes a build product. * </pre> * * Protobuf type {@code grafeas.v1.Artifact} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grafeas.v1.Artifact) io.grafeas.v1.ArtifactOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_Artifact_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_Artifact_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1.Artifact.class, io.grafeas.v1.Artifact.Builder.class); } // Construct using io.grafeas.v1.Artifact.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; checksum_ = ""; id_ = ""; names_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_Artifact_descriptor; } @java.lang.Override public io.grafeas.v1.Artifact getDefaultInstanceForType() { return io.grafeas.v1.Artifact.getDefaultInstance(); } @java.lang.Override public io.grafeas.v1.Artifact build() { io.grafeas.v1.Artifact result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public io.grafeas.v1.Artifact buildPartial() { io.grafeas.v1.Artifact result = new 
io.grafeas.v1.Artifact(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(io.grafeas.v1.Artifact result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.checksum_ = checksum_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.id_ = id_; } if (((from_bitField0_ & 0x00000004) != 0)) { names_.makeImmutable(); result.names_ = names_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grafeas.v1.Artifact) { return mergeFrom((io.grafeas.v1.Artifact) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grafeas.v1.Artifact other) { if (other == io.grafeas.v1.Artifact.getDefaultInstance()) return this; if (!other.getChecksum().isEmpty()) { checksum_ = other.checksum_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getId().isEmpty()) { id_ = other.id_; bitField0_ |= 0x00000002; onChanged(); } if (!other.names_.isEmpty()) { if (names_.isEmpty()) { names_ = other.names_; bitField0_ |= 0x00000004; } else { 
ensureNamesIsMutable(); names_.addAll(other.names_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { checksum_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { id_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { java.lang.String s = input.readStringRequireUtf8(); ensureNamesIsMutable(); names_.add(s); break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object checksum_ = ""; /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The checksum. */ public java.lang.String getChecksum() { java.lang.Object ref = checksum_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); checksum_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The bytes for checksum. 
*/ public com.google.protobuf.ByteString getChecksumBytes() { java.lang.Object ref = checksum_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); checksum_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @param value The checksum to set. * @return This builder for chaining. */ public Builder setChecksum(java.lang.String value) { if (value == null) { throw new NullPointerException(); } checksum_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return This builder for chaining. */ public Builder clearChecksum() { checksum_ = getDefaultInstance().getChecksum(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @param value The bytes for checksum to set. * @return This builder for chaining. */ public Builder setChecksumBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); checksum_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object id_ = ""; /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The id. 
*/ public java.lang.String getId() { java.lang.Object ref = id_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The bytes for id. */ public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @param value The id to set. * @return This builder for chaining. */ public Builder setId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } id_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return This builder for chaining. */ public Builder clearId() { id_ = getDefaultInstance().getId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @param value The bytes for id to set. * @return This builder for chaining. 
*/ public Builder setIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); id_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList names_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureNamesIsMutable() { if (!names_.isModifiable()) { names_ = new com.google.protobuf.LazyStringArrayList(names_); } bitField0_ |= 0x00000004; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return A list containing the names. */ public com.google.protobuf.ProtocolStringList getNamesList() { names_.makeImmutable(); return names_; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return The count of names. */ public int getNamesCount() { return names_.size(); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the element to return. 
* @return The names at the given index. */ public java.lang.String getNames(int index) { return names_.get(index); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the value to return. * @return The bytes of the names at the given index. */ public com.google.protobuf.ByteString getNamesBytes(int index) { return names_.getByteString(index); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index to set the value at. * @param value The names to set. * @return This builder for chaining. */ public Builder setNames(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNamesIsMutable(); names_.set(index, value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param value The names to add. * @return This builder for chaining. 
*/ public Builder addNames(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNamesIsMutable(); names_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param values The names to add. * @return This builder for chaining. */ public Builder addAllNames(java.lang.Iterable<java.lang.String> values) { ensureNamesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, names_); bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return This builder for chaining. */ public Builder clearNames() { names_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); ; onChanged(); return this; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param value The bytes of the names to add. * @return This builder for chaining. 
*/ public Builder addNamesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureNamesIsMutable(); names_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:grafeas.v1.Artifact) } // @@protoc_insertion_point(class_scope:grafeas.v1.Artifact) private static final io.grafeas.v1.Artifact DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grafeas.v1.Artifact(); } public static io.grafeas.v1.Artifact getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Artifact> PARSER = new com.google.protobuf.AbstractParser<Artifact>() { @java.lang.Override public Artifact parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Artifact> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Artifact> getParserForType() { return PARSER; } 
@java.lang.Override public io.grafeas.v1.Artifact getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,212
java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/SessionSegment.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/analytics/data/v1alpha/data.proto

// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;

// NOTE(review): protoc-generated file. Do not hand-edit; regenerate from
// google/analytics/data/v1alpha/data.proto so changes survive the next codegen run.
/**
 *
 *
 * <pre>
 * Session segments are subsets of the sessions that occurred on your site or
 * app: for example, all the sessions that originated from a particular
 * advertising campaign.
 * </pre>
 *
 * Protobuf type {@code google.analytics.data.v1alpha.SessionSegment}
 */
public final class SessionSegment extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.SessionSegment)
    SessionSegmentOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use SessionSegment.newBuilder() to construct.
  private SessionSegment(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private SessionSegment() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SessionSegment();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_SessionSegment_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_SessionSegment_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1alpha.SessionSegment.class,
            com.google.analytics.data.v1alpha.SessionSegment.Builder.class);
  }

  // Presence bits: bit 0 = session_inclusion_criteria, bit 1 = exclusion.
  private int bitField0_;
  public static final int SESSION_INCLUSION_CRITERIA_FIELD_NUMBER = 1;
  private com.google.analytics.data.v1alpha.SessionSegmentCriteria sessionInclusionCriteria_;

  /**
   *
   *
   * <pre>
   * Defines which sessions are included in this segment. Optional.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
   * </code>
   *
   * @return Whether the sessionInclusionCriteria field is set.
   */
  @java.lang.Override
  public boolean hasSessionInclusionCriteria() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Defines which sessions are included in this segment. Optional.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
   * </code>
   *
   * @return The sessionInclusionCriteria.
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegmentCriteria getSessionInclusionCriteria() {
    return sessionInclusionCriteria_ == null
        ? com.google.analytics.data.v1alpha.SessionSegmentCriteria.getDefaultInstance()
        : sessionInclusionCriteria_;
  }

  /**
   *
   *
   * <pre>
   * Defines which sessions are included in this segment. Optional.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegmentCriteriaOrBuilder
      getSessionInclusionCriteriaOrBuilder() {
    return sessionInclusionCriteria_ == null
        ? com.google.analytics.data.v1alpha.SessionSegmentCriteria.getDefaultInstance()
        : sessionInclusionCriteria_;
  }

  public static final int EXCLUSION_FIELD_NUMBER = 2;
  private com.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion_;

  /**
   *
   *
   * <pre>
   * Defines which sessions are excluded in this segment. Optional.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
   *
   * @return Whether the exclusion field is set.
   */
  @java.lang.Override
  public boolean hasExclusion() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Defines which sessions are excluded in this segment. Optional.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
   *
   * @return The exclusion.
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegmentExclusion getExclusion() {
    return exclusion_ == null
        ? com.google.analytics.data.v1alpha.SessionSegmentExclusion.getDefaultInstance()
        : exclusion_;
  }

  /**
   *
   *
   * <pre>
   * Defines which sessions are excluded in this segment. Optional.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegmentExclusionOrBuilder
      getExclusionOrBuilder() {
    return exclusion_ == null
        ? com.google.analytics.data.v1alpha.SessionSegmentExclusion.getDefaultInstance()
        : exclusion_;
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getSessionInclusionCriteria());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getExclusion());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, getSessionInclusionCriteria());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExclusion());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1alpha.SessionSegment)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1alpha.SessionSegment other =
        (com.google.analytics.data.v1alpha.SessionSegment) obj;

    if (hasSessionInclusionCriteria() != other.hasSessionInclusionCriteria()) return false;
    if (hasSessionInclusionCriteria()) {
      if (!getSessionInclusionCriteria().equals(other.getSessionInclusionCriteria())) return false;
    }
    if (hasExclusion() != other.hasExclusion()) return false;
    if (hasExclusion()) {
      if (!getExclusion().equals(other.getExclusion())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasSessionInclusionCriteria()) {
      hash = (37 * hash) + SESSION_INCLUSION_CRITERIA_FIELD_NUMBER;
      hash = (53 * hash) + getSessionInclusionCriteria().hashCode();
    }
    if (hasExclusion()) {
      hash = (37 * hash) + EXCLUSION_FIELD_NUMBER;
      hash = (53 * hash) + getExclusion().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SessionSegment parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.analytics.data.v1alpha.SessionSegment prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Session segments are subsets of the sessions that occurred on your site or
   * app: for example, all the sessions that originated from a particular
   * advertising campaign.
   * </pre>
   *
   * Protobuf type {@code google.analytics.data.v1alpha.SessionSegment}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.SessionSegment)
      com.google.analytics.data.v1alpha.SessionSegmentOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_SessionSegment_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_SessionSegment_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.data.v1alpha.SessionSegment.class,
              com.google.analytics.data.v1alpha.SessionSegment.Builder.class);
    }

    // Construct using com.google.analytics.data.v1alpha.SessionSegment.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getSessionInclusionCriteriaFieldBuilder();
        getExclusionFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      sessionInclusionCriteria_ = null;
      if (sessionInclusionCriteriaBuilder_ != null) {
        sessionInclusionCriteriaBuilder_.dispose();
        sessionInclusionCriteriaBuilder_ = null;
      }
      exclusion_ = null;
      if (exclusionBuilder_ != null) {
        exclusionBuilder_.dispose();
        exclusionBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_SessionSegment_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.data.v1alpha.SessionSegment getDefaultInstanceForType() {
      return com.google.analytics.data.v1alpha.SessionSegment.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.data.v1alpha.SessionSegment build() {
      com.google.analytics.data.v1alpha.SessionSegment result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.data.v1alpha.SessionSegment buildPartial() {
      com.google.analytics.data.v1alpha.SessionSegment result =
          new com.google.analytics.data.v1alpha.SessionSegment(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from builder state into the freshly built message.
    private void buildPartial0(com.google.analytics.data.v1alpha.SessionSegment result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.sessionInclusionCriteria_ =
            sessionInclusionCriteriaBuilder_ == null
                ? sessionInclusionCriteria_
                : sessionInclusionCriteriaBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.exclusion_ = exclusionBuilder_ == null ? exclusion_ : exclusionBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.data.v1alpha.SessionSegment) {
        return mergeFrom((com.google.analytics.data.v1alpha.SessionSegment) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.analytics.data.v1alpha.SessionSegment other) {
      if (other == com.google.analytics.data.v1alpha.SessionSegment.getDefaultInstance())
        return this;
      if (other.hasSessionInclusionCriteria()) {
        mergeSessionInclusionCriteria(other.getSessionInclusionCriteria());
      }
      if (other.hasExclusion()) {
        mergeExclusion(other.getExclusion());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getSessionInclusionCriteriaFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getExclusionFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.analytics.data.v1alpha.SessionSegmentCriteria sessionInclusionCriteria_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.analytics.data.v1alpha.SessionSegmentCriteria,
            com.google.analytics.data.v1alpha.SessionSegmentCriteria.Builder,
            com.google.analytics.data.v1alpha.SessionSegmentCriteriaOrBuilder>
        sessionInclusionCriteriaBuilder_;

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     *
     * @return Whether the sessionInclusionCriteria field is set.
     */
    public boolean hasSessionInclusionCriteria() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     *
     * @return The sessionInclusionCriteria.
     */
    public com.google.analytics.data.v1alpha.SessionSegmentCriteria getSessionInclusionCriteria() {
      if (sessionInclusionCriteriaBuilder_ == null) {
        return sessionInclusionCriteria_ == null
            ? com.google.analytics.data.v1alpha.SessionSegmentCriteria.getDefaultInstance()
            : sessionInclusionCriteria_;
      } else {
        return sessionInclusionCriteriaBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     */
    public Builder setSessionInclusionCriteria(
        com.google.analytics.data.v1alpha.SessionSegmentCriteria value) {
      if (sessionInclusionCriteriaBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        sessionInclusionCriteria_ = value;
      } else {
        sessionInclusionCriteriaBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     */
    public Builder setSessionInclusionCriteria(
        com.google.analytics.data.v1alpha.SessionSegmentCriteria.Builder builderForValue) {
      if (sessionInclusionCriteriaBuilder_ == null) {
        sessionInclusionCriteria_ = builderForValue.build();
      } else {
        sessionInclusionCriteriaBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     */
    public Builder mergeSessionInclusionCriteria(
        com.google.analytics.data.v1alpha.SessionSegmentCriteria value) {
      if (sessionInclusionCriteriaBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && sessionInclusionCriteria_ != null
            && sessionInclusionCriteria_
                != com.google.analytics.data.v1alpha.SessionSegmentCriteria.getDefaultInstance()) {
          getSessionInclusionCriteriaBuilder().mergeFrom(value);
        } else {
          sessionInclusionCriteria_ = value;
        }
      } else {
        sessionInclusionCriteriaBuilder_.mergeFrom(value);
      }
      if (sessionInclusionCriteria_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     */
    public Builder clearSessionInclusionCriteria() {
      bitField0_ = (bitField0_ & ~0x00000001);
      sessionInclusionCriteria_ = null;
      if (sessionInclusionCriteriaBuilder_ != null) {
        sessionInclusionCriteriaBuilder_.dispose();
        sessionInclusionCriteriaBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     */
    public com.google.analytics.data.v1alpha.SessionSegmentCriteria.Builder
        getSessionInclusionCriteriaBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getSessionInclusionCriteriaFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     */
    public com.google.analytics.data.v1alpha.SessionSegmentCriteriaOrBuilder
        getSessionInclusionCriteriaOrBuilder() {
      if (sessionInclusionCriteriaBuilder_ != null) {
        return sessionInclusionCriteriaBuilder_.getMessageOrBuilder();
      } else {
        return sessionInclusionCriteria_ == null
            ? com.google.analytics.data.v1alpha.SessionSegmentCriteria.getDefaultInstance()
            : sessionInclusionCriteria_;
      }
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are included in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentCriteria session_inclusion_criteria = 1;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.analytics.data.v1alpha.SessionSegmentCriteria,
            com.google.analytics.data.v1alpha.SessionSegmentCriteria.Builder,
            com.google.analytics.data.v1alpha.SessionSegmentCriteriaOrBuilder>
        getSessionInclusionCriteriaFieldBuilder() {
      if (sessionInclusionCriteriaBuilder_ == null) {
        sessionInclusionCriteriaBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.analytics.data.v1alpha.SessionSegmentCriteria,
                com.google.analytics.data.v1alpha.SessionSegmentCriteria.Builder,
                com.google.analytics.data.v1alpha.SessionSegmentCriteriaOrBuilder>(
                getSessionInclusionCriteria(), getParentForChildren(), isClean());
        sessionInclusionCriteria_ = null;
      }
      return sessionInclusionCriteriaBuilder_;
    }

    private com.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.analytics.data.v1alpha.SessionSegmentExclusion,
            com.google.analytics.data.v1alpha.SessionSegmentExclusion.Builder,
            com.google.analytics.data.v1alpha.SessionSegmentExclusionOrBuilder>
        exclusionBuilder_;

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     *
     * @return Whether the exclusion field is set.
     */
    public boolean hasExclusion() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     *
     * @return The exclusion.
     */
    public com.google.analytics.data.v1alpha.SessionSegmentExclusion getExclusion() {
      if (exclusionBuilder_ == null) {
        return exclusion_ == null
            ? com.google.analytics.data.v1alpha.SessionSegmentExclusion.getDefaultInstance()
            : exclusion_;
      } else {
        return exclusionBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     */
    public Builder setExclusion(com.google.analytics.data.v1alpha.SessionSegmentExclusion value) {
      if (exclusionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        exclusion_ = value;
      } else {
        exclusionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     */
    public Builder setExclusion(
        com.google.analytics.data.v1alpha.SessionSegmentExclusion.Builder builderForValue) {
      if (exclusionBuilder_ == null) {
        exclusion_ = builderForValue.build();
      } else {
        exclusionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     */
    public Builder mergeExclusion(com.google.analytics.data.v1alpha.SessionSegmentExclusion value) {
      if (exclusionBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && exclusion_ != null
            && exclusion_
                != com.google.analytics.data.v1alpha.SessionSegmentExclusion.getDefaultInstance()) {
          getExclusionBuilder().mergeFrom(value);
        } else {
          exclusion_ = value;
        }
      } else {
        exclusionBuilder_.mergeFrom(value);
      }
      if (exclusion_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     */
    public Builder clearExclusion() {
      bitField0_ = (bitField0_ & ~0x00000002);
      exclusion_ = null;
      if (exclusionBuilder_ != null) {
        exclusionBuilder_.dispose();
        exclusionBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     */
    public com.google.analytics.data.v1alpha.SessionSegmentExclusion.Builder getExclusionBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getExclusionFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     */
    public com.google.analytics.data.v1alpha.SessionSegmentExclusionOrBuilder
        getExclusionOrBuilder() {
      if (exclusionBuilder_ != null) {
        return exclusionBuilder_.getMessageOrBuilder();
      } else {
        return exclusion_ == null
            ? com.google.analytics.data.v1alpha.SessionSegmentExclusion.getDefaultInstance()
            : exclusion_;
      }
    }

    /**
     *
     *
     * <pre>
     * Defines which sessions are excluded in this segment. Optional.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.SessionSegmentExclusion exclusion = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.analytics.data.v1alpha.SessionSegmentExclusion,
            com.google.analytics.data.v1alpha.SessionSegmentExclusion.Builder,
            com.google.analytics.data.v1alpha.SessionSegmentExclusionOrBuilder>
        getExclusionFieldBuilder() {
      if (exclusionBuilder_ == null) {
        exclusionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.analytics.data.v1alpha.SessionSegmentExclusion,
                com.google.analytics.data.v1alpha.SessionSegmentExclusion.Builder,
                com.google.analytics.data.v1alpha.SessionSegmentExclusionOrBuilder>(
                getExclusion(), getParentForChildren(), isClean());
        exclusion_ = null;
      }
      return exclusionBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.SessionSegment)
  }

  // @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.SessionSegment)
  private static final com.google.analytics.data.v1alpha.SessionSegment DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.SessionSegment();
  }

  public static com.google.analytics.data.v1alpha.SessionSegment getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<SessionSegment> PARSER =
      new com.google.protobuf.AbstractParser<SessionSegment>() {
        @java.lang.Override
        public SessionSegment parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SessionSegment> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SessionSegment> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegment getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
oracle/graal
35,414
compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/nodes/GraphState.java
/* * Copyright (c) 2022, 2025, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package jdk.graal.compiler.nodes; import java.util.EnumSet; import java.util.Formatter; import java.util.Locale; import java.util.Objects; import jdk.graal.compiler.api.replacements.Snippet; import jdk.graal.compiler.debug.Assertions; import jdk.graal.compiler.graph.Graph; import jdk.graal.compiler.nodes.calc.FloatingNode; import jdk.graal.compiler.nodes.util.GraphUtil; import jdk.vm.ci.meta.SpeculationLog; /** * Describes {@link StructuredGraph} state with respect to compilation. * * This state is defined by fields which represent * <ul> * <li>The progress made in the compilation (e.g. 
which stages have been reached (see * {@link StageFlag}), which verification is performed by {@link FrameState}s (see * {@link FrameStateVerification}))</li> * <li>The properties of the intermediate representation (e.g. if new {@link DeoptimizingNode}s can * be introduced (see {@link GuardsStage}), if the graph contains nodes that require a stage to be * applied on the graph (see {@link #getFutureRequiredStages()}))</li> * </ul> */ public final class GraphState { /** * These sets of {@link StageFlag}s represent the necessary stages that must be applied to a * {@link StructuredGraph} for a complete compilation. */ private static final EnumSet<StageFlag> HIGH_TIER_MANDATORY_STAGES = EnumSet.of(StageFlag.LOOP_OVERFLOWS_CHECKED, StageFlag.HIGH_TIER_LOWERING); private static final EnumSet<StageFlag> MID_TIER_MANDATORY_STAGES = EnumSet.of( StageFlag.VALUE_PROXY_REMOVAL, StageFlag.SAFEPOINTS_INSERTION, StageFlag.GUARD_LOWERING, StageFlag.MID_TIER_LOWERING, StageFlag.FSA, StageFlag.MID_TIER_BARRIER_ADDITION); private static final EnumSet<StageFlag> LOW_TIER_MANDATORY_STAGES = EnumSet.of( StageFlag.LOW_TIER_LOWERING, StageFlag.EXPAND_LOGIC, StageFlag.ADDRESS_LOWERING, StageFlag.REMOVE_OPAQUE_VALUES, StageFlag.FINAL_SCHEDULE, StageFlag.LOW_TIER_BARRIER_ADDITION); private static final EnumSet<StageFlag> ENTERPRISE_MID_TIER_MANDATORY_STAGES = EnumSet.of( StageFlag.OPTIMISTIC_ALIASING, StageFlag.GUARD_LOWERING, StageFlag.VALUE_PROXY_REMOVAL, StageFlag.SAFEPOINTS_INSERTION, StageFlag.MID_TIER_LOWERING, StageFlag.FSA, StageFlag.NODE_VECTORIZATION, StageFlag.MID_TIER_BARRIER_ADDITION); /** * This set of {@link StageFlag}s represents the stages a {@link StructuredGraph} initially * requires to correctly pass all the other stages of the compilation. (See * {@link #getFutureRequiredStages()}) */ public static final EnumSet<StageFlag> INITIAL_REQUIRED_STAGES = EnumSet.of(StageFlag.CANONICALIZATION); /** * Indicates a stage is in progress. 
*/ private StageFlag currentStage; /** * Flag to indicate {@link #forceDisableFrameStateVerification()} was called. */ private boolean disabledFrameStateVerification; /** * Represents the status of {@linkplain FrameState} verification of * {@linkplain AbstractStateSplit} state after. */ private FrameStateVerification frameStateVerification; /** * Records the stages required by this graph. For example, if a stage introduces nodes that need * to be lowered in the graph, the graph will require a lowering stage to be in a correct state * after the compilation. After the lowering has been executed, the requirement will be * fulfilled. {@link GraphState#futureRequiredStages} should be empty after the compilation. */ private EnumSet<StageFlag> futureRequiredStages; /** * Represents the state and properties of {@link DeoptimizingNode}s and {@link FrameState}s in * the graph. */ private GuardsStage guardsStage; /** * Records which stages have been applied to the graph. */ private EnumSet<StageFlag> stageFlags; /** * Contains the {@link SpeculationLog} used to perform speculative operations on this graph. */ private final SpeculationLog speculationLog; /** * Creates a {@link GraphState} with the given fields. * * @param isSubstitution determines this {@linkplain #getFrameStateVerification() frame state * verification}. {@link FrameStateVerification#NONE} is used if it is {@code true}, * otherwise it is {@link FrameStateVerification#ALL}. If it is {@code true}, * {@link #isFrameStateVerificationDisabled()} will be {@code true}. */ public GraphState(StageFlag currentStage, boolean disabledFrameStateVerification, boolean isSubstitution, EnumSet<StageFlag> futureRequiredStages, GuardsStage guardsStage, SpeculationLog speculationLog, EnumSet<StageFlag> stageFlags) { this(currentStage, disabledFrameStateVerification || isSubstitution, isSubstitution ? 
FrameStateVerification.NONE : FrameStateVerification.ALL, futureRequiredStages, guardsStage, speculationLog, stageFlags); } /** * Creates a {@link GraphState} with the given fields. * * @param guardsStage the {@link GuardsStage} of this graph state, * {@link GuardsStage#FLOATING_GUARDS} if it is {@code null}. */ public GraphState(StageFlag currentStage, boolean disabledFrameStateVerification, FrameStateVerification frameStateVerification, EnumSet<StageFlag> futureRequiredStages, GuardsStage guardsStage, SpeculationLog speculationLog, EnumSet<StageFlag> stageFlags) { this.currentStage = currentStage; this.disabledFrameStateVerification = disabledFrameStateVerification; this.frameStateVerification = frameStateVerification; this.futureRequiredStages = futureRequiredStages == null ? EnumSet.noneOf(StageFlag.class) : futureRequiredStages; this.guardsStage = guardsStage == null ? GuardsStage.FLOATING_GUARDS : guardsStage; this.speculationLog = speculationLog; this.stageFlags = stageFlags == null ? EnumSet.noneOf(StageFlag.class) : stageFlags; } /** * Creates a {@link GraphState} with {@linkplain #getGuardsStage() guards stage} set to * {@link GuardsStage#FLOATING_GUARDS}, empty {@link EnumSet} for {@linkplain #getStageFlags() * stage flags} and {@linkplain #getFutureRequiredStages() future required stages}, * {@linkplain #getFrameStateVerification() frame state verification} set to * {@link FrameStateVerification#ALL} and {@code null} for the other fields. */ public static GraphState defaultGraphState() { return new GraphState(null, false, false, null, null, null, null); } /** * Creates a copy of this graph state. The copy's {@linkplain #getStageFlags() stage flags} and * {@linkplain #getFutureRequiredStages() future required stages} are deep copy of this graph * state's respective fields. 
*/ public GraphState copy() { return new GraphState(this.currentStage, disabledFrameStateVerification, this.frameStateVerification, EnumSet.copyOf(this.futureRequiredStages), this.guardsStage, this.speculationLog, EnumSet.copyOf(this.stageFlags)); } /** * Creates a copy of this graph state with the given {@linkplain #getSpeculationLog() * speculation log}. The copy's {@linkplain #getStageFlags() stage flags} and * {@linkplain #getFutureRequiredStages() future required stages} are deep copy of this graph * state's respective fields. * * @param isSubstitution determines the copy's {@linkplain #getFrameStateVerification() frame * state verification}. (See * {@link #GraphState(StageFlag, boolean, boolean, EnumSet, GuardsStage, SpeculationLog, EnumSet)}) */ public GraphState copyWith(boolean isSubstitution, SpeculationLog speculationLogForCopy) { return new GraphState(this.currentStage, disabledFrameStateVerification, isSubstitution, EnumSet.copyOf(this.futureRequiredStages), this.guardsStage, speculationLogForCopy, EnumSet.copyOf(this.stageFlags)); } @Override public String toString() { return toString(""); } /** * Creates a {@link String} with this {@linkplain #getGuardsStage() guards stage}, * {@linkplain #getStageFlags() stage flags}, {@linkplain #getFrameStateVerification() frame * state verification} and {@linkplain #getFutureRequiredStages() future required stages}. * * @param prefix the string inserted at the beginning of each line of the resulting string. 
*/ public String toString(String prefix) { Formatter formatter = new Formatter(); formatter.format("%sGraphState:%n", prefix); formatter.format("%s\tGuards stage: %s%n", prefix, guardsStage.toString()); formatter.format("%s\tStage flags:%n", prefix); for (StageFlag flag : stageFlags) { formatter.format("%s\t\t%s%n", prefix, flag.toString()); } formatter.format("%s\tFrame state verification: %s%n", prefix, frameStateVerification.toString()); formatter.format("%s\tFuture required stages: %s%n", prefix, futureRequiredStages.toString()); return formatter.toString(); } /** * Creates a {@link String} representing the differences between this * {@linkplain #getGuardsStage() guards stage}, {@linkplain #getStageFlags() stage flags}, * {@linkplain #getFrameStateVerification() frame state verification} and * {@linkplain #getFutureRequiredStages() future required stages} and {@code previous}'s * respective fields. If {@code this} {@link #equals} {@code previous}, an empty string is * returned. */ public String updateFromPreviousToString(GraphState previous) { if (this.equals(previous)) { return ""; } StringBuilder builder = new StringBuilder(); builder.append('{'); builder.append(valueStringAsDiff(previous.guardsStage, this.guardsStage, "Guards stage: ", ", ")); builder.append(newFlagsToString(previous.stageFlags, this.stageFlags, "+", "Stage flags: ")); builder.append(valueStringAsDiff(previous.frameStateVerification, this.frameStateVerification, "Frame state verification: ", ", ")); builder.append(newFlagsToString(previous.futureRequiredStages, this.futureRequiredStages, "+", "Future required stages: ")); builder.append(newFlagsToString(this.futureRequiredStages, previous.futureRequiredStages, "-", "")); if (builder.length() > 1) { builder.setLength(builder.length() - 2); } builder.append('}'); return builder.toString(); } /** * @return the {@link String} representing the difference between {@code oldValue} and * {@code newValue}, surrounded by {@code prefix} and {@code 
suffix}. If both values are * equal, returns an empty string. */ private static <T> String valueStringAsDiff(T oldValue, T newValue, String prefix, String suffix) { if (oldValue == newValue) { return ""; } return String.format("%s%s -> %s%s", prefix, oldValue, newValue, suffix); } /** * @return a {@link String} representing the {@link StageFlag}s that differ between * {@code oldSet} and {@code newSet}. If both sets are equal, returns an empty string. */ private static String newFlagsToString(EnumSet<StageFlag> oldSet, EnumSet<StageFlag> newSet, String flagPrefix, String prefix) { Formatter formatter = new Formatter(); EnumSet<StageFlag> newFlags = newSet.clone(); newFlags.removeAll(oldSet); if (!newFlags.isEmpty()) { formatter.format(prefix); for (StageFlag flag : newFlags) { formatter.format("%s%s, ", flagPrefix, flag.toString()); } } return formatter.toString(); } @Override public int hashCode() { return Objects.hash(currentStage, disabledFrameStateVerification, frameStateVerification, futureRequiredStages, guardsStage, speculationLog, stageFlags); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof GraphState)) { return false; } GraphState graphState = (GraphState) obj; return this.currentStage == graphState.currentStage && this.disabledFrameStateVerification == graphState.disabledFrameStateVerification && this.frameStateVerification == graphState.frameStateVerification && this.futureRequiredStages.equals(graphState.futureRequiredStages) && this.guardsStage == graphState.guardsStage && Objects.equals(this.speculationLog, graphState.speculationLog) && this.stageFlags.equals(graphState.stageFlags); } /** * @return the {@link SpeculationLog} used to perform speculative operations on this graph. */ public SpeculationLog getSpeculationLog() { return speculationLog; } /** * Determines if {@linkplain #getFrameStateVerification() frame state verification} has been * forcefully disabled. 
* * @return {@code true} if {@link #forceDisableFrameStateVerification()} or * {@link StructuredGraph#clearAllStateAfterForTestingOnly()} has been called or if this * graph was build as a substitution (see * {@link #GraphState(StageFlag, boolean, boolean, EnumSet, GuardsStage, SpeculationLog, EnumSet)}). */ public boolean isFrameStateVerificationDisabled() { return disabledFrameStateVerification; } /** * Different node types verified during {@linkplain GraphState.FrameStateVerification}. See * {@linkplain GraphState.FrameStateVerification} for details. */ public enum FrameStateVerificationFeature { STATE_SPLITS, MERGES, LOOP_BEGINS, LOOP_EXITS } /** * The different stages of the compilation of a {@link Graph} regarding the status of * {@linkplain FrameState} verification of {@linkplain AbstractStateSplit} state after. * Verification starts with the mode {@linkplain FrameStateVerification#ALL}, i.e., all state * splits with side-effects, merges and loop exits need a proper state after. The verification * mode progresses monotonously until the {@linkplain FrameStateVerification#NONE} mode is * reached. From there on, no further {@linkplain AbstractStateSplit#stateAfter} verification * happens. */ public enum FrameStateVerification { /** * Verify all {@linkplain AbstractStateSplit} nodes that return {@code true} for * {@linkplain AbstractStateSplit#hasSideEffect()} have a * {@linkplain AbstractStateSplit#stateAfter} assigned. Additionally, verify * {@linkplain LoopExitNode} and {@linkplain AbstractMergeNode} have a valid * {@linkplain AbstractStateSplit#stateAfter}. This is necessary to avoid missing * {@linkplain FrameState} after optimizations. See {@link GraphUtil#mayRemoveSplit} for * more details. * * This stage is the initial verification stage for every graph. */ ALL(EnumSet.allOf(FrameStateVerificationFeature.class)), /** * Same as {@linkplain #ALL} except that {@linkplain LoopExitNode} nodes are no longer * verified. 
*/ ALL_EXCEPT_LOOP_EXIT(EnumSet.complementOf(EnumSet.of(FrameStateVerificationFeature.LOOP_EXITS))), /** * Same as {@linkplain #ALL_EXCEPT_LOOP_EXIT} except that {@linkplain LoopBeginNode} are no * longer verified. */ ALL_EXCEPT_LOOPS(EnumSet.complementOf(EnumSet.of(FrameStateVerificationFeature.LOOP_BEGINS, FrameStateVerificationFeature.LOOP_EXITS))), /** * Verification is disabled. Typically used after assigning {@linkplain FrameState} to * {@linkplain DeoptimizeNode} or for {@linkplain Snippet} compilations. */ NONE(EnumSet.noneOf(FrameStateVerificationFeature.class)); private EnumSet<FrameStateVerificationFeature> features; FrameStateVerification(EnumSet<FrameStateVerificationFeature> features) { this.features = features; } /** * Determines if the current verification mode implies this feature. * * @param feature the other verification feature to check * @return {@code true} if this verification mode implies the feature, {@code false} * otherwise */ boolean implies(FrameStateVerificationFeature feature) { return this.features.contains(feature); } } /** * @return the status of the {@link FrameState} verification of {@link AbstractStateSplit} state * after. */ public FrameStateVerification getFrameStateVerification() { return frameStateVerification; } /** * Checks if this {@linkplain #getFrameStateVerification() frame state verification} can be * weakened to the given {@link FrameStateVerification}. Verification can only be relaxed over * the course of compilation. */ public boolean canWeakenFrameStateVerification(FrameStateVerification stage) { if (isFrameStateVerificationDisabled()) { assert frameStateVerification == FrameStateVerification.NONE : "Frame state verification is disabled, should be NONE but is " + frameStateVerification; return true; } return frameStateVerification.ordinal() <= stage.ordinal(); } /** * Sets the given {@link FrameStateVerification} as this * {@linkplain #getFrameStateVerification() frame state verification}. 
*/ public void weakenFrameStateVerification(FrameStateVerification newFrameStateVerification) { if (isFrameStateVerificationDisabled()) { assert frameStateVerification == FrameStateVerification.NONE : "Frame state verification is disabled, should be NONE but is " + frameStateVerification; return; } assert canWeakenFrameStateVerification(newFrameStateVerification) : "Old verification " + frameStateVerification + " must imply new verification " + newFrameStateVerification + ", i.e., verification can only be relaxed over the course of compilation"; frameStateVerification = newFrameStateVerification; } /** * Forcefully disable {@linkplain #getFrameStateVerification() frame state verification} for the * rest of this compilation. This must only be used for stubs, snippets, and test code that * builds custom compilation pipelines. * * Normal compilations must use {@link #weakenFrameStateVerification(FrameStateVerification)} to * progress through the standard stages of frame state verification. Calling this method is * <em>not</em> equivalent to calling {@code weakenFrameStateVerification(NONE)}. */ public void forceDisableFrameStateVerification() { weakenFrameStateVerification(FrameStateVerification.NONE); this.disabledFrameStateVerification = true; } /** * The different stages of the compilation of a {@link Graph} regarding the status of * {@link GuardNode}s, {@link DeoptimizingNode}s and {@link FrameState}s. The stage of a graph * progresses monotonously. */ public enum GuardsStage { /** * During this stage, there can be {@link FloatingNode floating} {@link DeoptimizingNode}s * such as {@link GuardNode}s. New {@link DeoptimizingNode}s can be introduced without * constraints. {@link FrameState}s are associated with {@link StateSplit} nodes. */ FLOATING_GUARDS, /** * During this stage, all {@link DeoptimizingNode}s must be {@link FixedNode fixed} but new * {@link DeoptimizingNode}s can still be introduced. 
{@link FrameState}s are still * associated with {@link StateSplit} nodes. */ FIXED_DEOPTS, /** * During this stage, all {@link DeoptimizingNode}s must be {@link FixedNode fixed}. New * {@link DeoptimizingNode}s cannot be introduced. {@link FrameState}s are now associated * with {@link DeoptimizingNode}s. */ AFTER_FSA; /** * Checks if this guards stage indicates that the graph may contain {@link FloatingNode * floating} {@link DeoptimizingNode}s such as {@link GuardNode}s. */ public boolean allowsFloatingGuards() { return this == FLOATING_GUARDS; } /** * Checks if this guards stage indicates new {@link DeoptimizingNode}s can be introduced in * the graph. */ public boolean allowsGuardInsertion() { return this.ordinal() <= FIXED_DEOPTS.ordinal(); } /** * Checks if this guards stage indicates all {@link FrameState}s are associated with * {@link DeoptimizingNode}s. */ public boolean areFrameStatesAtDeopts() { return this == AFTER_FSA; } /** * Checks if this guards stage indicates all {@link FrameState}s are associated with * {@link StateSplit} nodes. */ public boolean areFrameStatesAtSideEffects() { return !this.areFrameStatesAtDeopts(); } /** * Checks if this guards stage indicates all the {@link DeoptimizingNode}s are * {@link FixedNode fixed}. */ public boolean areDeoptsFixed() { return this.ordinal() >= FIXED_DEOPTS.ordinal(); } /** * Checks if this guards stage indicates a later or equivalent stage of the compilation than * the given stage. */ public boolean reachedGuardsStage(GuardsStage stage) { return this.ordinal() >= stage.ordinal(); } } /** * @return the current {@link GuardsStage} for this graph state. */ public GuardsStage getGuardsStage() { return guardsStage; } /** * Sets the {@linkplain #getGuardsStage() guards stage} to {@link GuardsStage#FLOATING_GUARDS}. */ public void initGuardsStage() { setGuardsStage(GuardsStage.FLOATING_GUARDS); } /** * Sets the {@linkplain #getGuardsStage() guards stage} of this graph state. 
The new * {@link GuardsStage} needs to indicate a progression in the compilation, not a regression. */ public void setGuardsStage(GuardsStage guardsStage) { assert guardsStage.ordinal() >= this.guardsStage.ordinal() : Assertions.errorMessageContext("this", this.guardsStage, "other", guardsStage); this.guardsStage = guardsStage; } /** * Determines if this graph state is configured in a way it only allows explicit exception edges * and no floating guards which would be lowered to deoptimize nodes. */ public boolean isExplicitExceptionsNoDeopt() { return guardsStage == GuardsStage.FIXED_DEOPTS && isAfterStage(StageFlag.GUARD_LOWERING); } /** * Determines if {@link jdk.graal.compiler.nodes.memory.FloatingReadNode FloatingReadNodes} are * allowed to be inserted. They should only be manually inserted if * {@link jdk.graal.compiler.phases.common.FloatingReadPhase} has been run and * {@link jdk.graal.compiler.phases.common.FixReadsPhase} has not. */ public boolean allowsFloatingReads() { return isAfterStage(StageFlag.FLOATING_READS) && isBeforeStage(StageFlag.FIXED_READS); } /** * Configure the graph to only allow explicit exception edges without floating guard nodes. That * is the graph: * * <ul> * <li>has explicit exception edges on {@link WithExceptionNode#exceptionEdge} successors</li> * <li>the graph does not support floating {@link GuardNode} as they lower to * {@link DeoptimizeNode}</li> * <li>{@link GuardNode} nodes are never lowered since they are not part of the graph. The graph * is always {@link #isAfterStage(StageFlag)} {@link StageFlag#GUARD_LOWERING}</li> * </ul> * * Note that this operation is only possible on empty graphs, i.e., it must be called at the * beginning of a compilation when a graph is created since it influences how the parser and * other components build the graph and meta data. 
*/ public void configureExplicitExceptionsNoDeopt() { assert !isExplicitExceptionsNoDeopt(); assert stageFlags.isEmpty() : "Must not have set a stage flag before"; assert guardsStage.allowsFloatingGuards() : "Default guards stage is floating guards"; setGuardsStage(GraphState.GuardsStage.FIXED_DEOPTS); setAfterStage(StageFlag.GUARD_LOWERING); } public void configureExplicitExceptionsNoDeoptIfNecessary() { if (!isExplicitExceptionsNoDeopt()) { configureExplicitExceptionsNoDeopt(); } } /** * Indicates FSA has been applied to this graph. (See {@link #setGuardsStage(GuardsStage)} and * {@link #setAfterStage(StageFlag)}) */ public void setAfterFSA() { setGuardsStage(GuardsStage.AFTER_FSA); setAfterStage(StageFlag.FSA); } /** * Different stages of the compilation regarding the status of various graph properties. The * order used to defined theses stages corresponds to their order in a standard compilation. */ public enum StageFlag { PARTIAL_EVALUATION, CANONICALIZATION, /* Stages applied by high tier. */ LOOP_OVERFLOWS_CHECKED, PARTIAL_ESCAPE, FINAL_PARTIAL_ESCAPE, VECTOR_API_EXPANSION, HIGH_TIER_LOWERING, /* Stages applied by mid tier. */ FLOATING_READS, GUARD_MOVEMENT, GUARD_LOWERING, STRIP_MINING, VALUE_PROXY_REMOVAL, SAFEPOINTS_INSERTION, MID_TIER_LOWERING, OPTIMISTIC_ALIASING, FSA, NODE_VECTORIZATION, VECTOR_MATERIALIZATION, OPTIMISTIC_GUARDS, MID_TIER_BARRIER_ADDITION, BARRIER_ELIMINATION, /* Stages applied by low tier. */ LOW_TIER_LOWERING, VECTOR_LOWERING, EXPAND_LOGIC, FIXED_READS, LOW_TIER_BARRIER_ADDITION, PARTIAL_REDUNDANCY_SCHEDULE, ADDRESS_LOWERING, FINAL_CANONICALIZATION, REMOVE_OPAQUE_VALUES, TARGET_VECTOR_LOWERING, FINAL_SCHEDULE } /** * Checks if this graph state is before a stage. This stage must not be in progress (see * {@link #isDuringStage(StageFlag)}) nor have been applied yet (see * {@link #isAfterStage(StageFlag)}). 
*/ public boolean isBeforeStage(StageFlag stage) { return !isDuringStage(stage) && !isAfterStage(stage); } /** * Phases may set this flag to indicate that a stage is in progress. This is optional: * {@link #isAfterStage(StageFlag)} may become true for a stage even if * {@link #isDuringStage(StageFlag)} was never set for that stage. */ public boolean isDuringStage(StageFlag stage) { return currentStage == stage; } /** * Checks if a stage has already been applied to this graph. */ public boolean isAfterStage(StageFlag stage) { return stageFlags.contains(stage); } /** * Checks if multiple stages have been already applied to this graph. */ public boolean isAfterStages(EnumSet<StageFlag> stages) { return stageFlags.containsAll(stages); } /** * Sets this {@link #currentStage} to indicate that a stage is in progress. This stage must not * have been applied yet. */ public void setDuringStage(StageFlag stage) { assert isBeforeStage(stage) : "Cannot set during stage " + stage + " since the graph is not before that stage"; currentStage = stage; } /** * Adds the given stage to this {@linkplain #getStageFlags() stage flags} to indicate this stage * has been applied. This stage must not have been applied yet. */ public void setAfterStage(StageFlag stage) { assert !isAfterStage(stage) : "Cannot set after stage " + stage + " since the graph is already in that state"; stageFlags.add(stage); currentStage = null; } /** * @return the stages (see {@link StageFlag}) that were applied to this graph. */ public EnumSet<StageFlag> getStageFlags() { return stageFlags; } /** * Checks if all the stages represented by the given {@link MandatoryStages} have been applied * to this graph. 
*/ public boolean hasAllMandatoryStages(MandatoryStages mandatoryStages) { return stageFlags.containsAll(mandatoryStages.highTier) && stageFlags.containsAll(mandatoryStages.midTier) && stageFlags.containsAll(mandatoryStages.lowTier); } /** * @return the number of stages that are in {@code targetStages} but not in the * {@linkplain #getStageFlags() stage flags} of this graph state. */ public int countMissingStages(EnumSet<StageFlag> targetStages) { EnumSet<StageFlag> target = EnumSet.copyOf(targetStages); target.removeAll(stageFlags); return target.size(); } /** * Adds the given {@link StageFlag} to the {@linkplain #getFutureRequiredStages() future * required stages} of this graph state. */ public void addFutureStageRequirement(StageFlag stage) { futureRequiredStages.add(stage); } /** * Removes the {@linkplain #getFutureRequiredStages() requirement} to the given * {@link StageFlag} from this graph state. */ public void removeRequirementToStage(StageFlag stage) { futureRequiredStages.remove(stage); } /** * Checks if the given {@link StageFlag} is contained in this graph state's * {@linkplain #getFutureRequiredStages() future required stages}. */ public boolean requiresFutureStage(StageFlag stage) { return futureRequiredStages.contains(stage); } /** * Checks if this graph state has remaining {@link StageFlag}s requirements in * {@linkplain #getFutureRequiredStages() future required stages}. */ public boolean requiresFutureStages() { return !futureRequiredStages.isEmpty(); } /** * @return which stages this graph state requires. These {@linkplain #getFutureRequiredStages() * future required stages} might includes lowering phases for nodes that were introduced * in the graph by previous stages for example. */ public EnumSet<StageFlag> getFutureRequiredStages() { return futureRequiredStages; } /** * Represents the necessary stages that must be applied to a {@link StructuredGraph} for a * complete compilation depending on the compiler configuration chosen. 
There is a different * {@link EnumSet} of {@link StageFlag}s for each tier of the compilation. */ public enum MandatoryStages { ECONOMY(HIGH_TIER_MANDATORY_STAGES, MID_TIER_MANDATORY_STAGES, LOW_TIER_MANDATORY_STAGES), COMMUNITY(HIGH_TIER_MANDATORY_STAGES, MID_TIER_MANDATORY_STAGES, LOW_TIER_MANDATORY_STAGES), ENTERPRISE(HIGH_TIER_MANDATORY_STAGES, ENTERPRISE_MID_TIER_MANDATORY_STAGES, LOW_TIER_MANDATORY_STAGES); private final EnumSet<StageFlag> highTier; private final EnumSet<StageFlag> midTier; private final EnumSet<StageFlag> lowTier; MandatoryStages(EnumSet<StageFlag> highTier, EnumSet<StageFlag> midTier, EnumSet<StageFlag> lowTier) { this.highTier = highTier; this.midTier = midTier; this.lowTier = lowTier; } /** * @return the {@link MandatoryStages} corresponding to the given string. If no such value * is found, returns {@link #COMMUNITY}. */ public static MandatoryStages getFromName(String name) { switch (name.toLowerCase(Locale.ROOT)) { case "economy": return ECONOMY; case "community": return COMMUNITY; case "enterprise": return ENTERPRISE; default: return COMMUNITY; } } /** * @return the {@link EnumSet} of {@link StageFlag}s that are mandatory for high tier. */ public EnumSet<StageFlag> getHighTier() { return highTier; } /** * @return the {@link EnumSet} of {@link StageFlag}s that are mandatory for mid tier. */ public EnumSet<StageFlag> getMidTier() { return midTier; } /** * @return the {@link EnumSet} of {@link StageFlag}s that are mandatory for low tier. */ public EnumSet<StageFlag> getLowTier() { return lowTier; } } }
googleapis/google-cloud-java
35,154
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/BusinessDayConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1beta/shippingsettings.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1beta; /** * * * <pre> * Business days of the warehouse. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.BusinessDayConfig} */ public final class BusinessDayConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.BusinessDayConfig) BusinessDayConfigOrBuilder { private static final long serialVersionUID = 0L; // Use BusinessDayConfig.newBuilder() to construct. 
private BusinessDayConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BusinessDayConfig() { businessDays_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BusinessDayConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_BusinessDayConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_BusinessDayConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.class, com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Builder.class); } /** Protobuf enum {@code google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday} */ public enum Weekday implements com.google.protobuf.ProtocolMessageEnum { /** <code>WEEKDAY_UNSPECIFIED = 0;</code> */ WEEKDAY_UNSPECIFIED(0), /** <code>MONDAY = 1;</code> */ MONDAY(1), /** <code>TUESDAY = 2;</code> */ TUESDAY(2), /** <code>WEDNESDAY = 3;</code> */ WEDNESDAY(3), /** <code>THURSDAY = 4;</code> */ THURSDAY(4), /** <code>FRIDAY = 5;</code> */ FRIDAY(5), /** <code>SATURDAY = 6;</code> */ SATURDAY(6), /** <code>SUNDAY = 7;</code> */ SUNDAY(7), UNRECOGNIZED(-1), ; /** <code>WEEKDAY_UNSPECIFIED = 0;</code> */ public static final int WEEKDAY_UNSPECIFIED_VALUE = 0; /** <code>MONDAY = 1;</code> */ public static final int MONDAY_VALUE = 1; /** <code>TUESDAY = 2;</code> */ public static final int TUESDAY_VALUE = 2; /** <code>WEDNESDAY = 3;</code> */ public static final int WEDNESDAY_VALUE = 3; /** <code>THURSDAY = 
4;</code> */ public static final int THURSDAY_VALUE = 4; /** <code>FRIDAY = 5;</code> */ public static final int FRIDAY_VALUE = 5; /** <code>SATURDAY = 6;</code> */ public static final int SATURDAY_VALUE = 6; /** <code>SUNDAY = 7;</code> */ public static final int SUNDAY_VALUE = 7; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Weekday valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static Weekday forNumber(int value) { switch (value) { case 0: return WEEKDAY_UNSPECIFIED; case 1: return MONDAY; case 2: return TUESDAY; case 3: return WEDNESDAY; case 4: return THURSDAY; case 5: return FRIDAY; case 6: return SATURDAY; case 7: return SUNDAY; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Weekday> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Weekday> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Weekday>() { public Weekday findValueByNumber(int number) { return Weekday.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { 
return com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.getDescriptor() .getEnumTypes() .get(0); } private static final Weekday[] VALUES = values(); public static Weekday valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Weekday(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday) } public static final int BUSINESS_DAYS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<java.lang.Integer> businessDays_; private static final com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday> businessDays_converter_ = new com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday>() { public com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday convert( java.lang.Integer from) { com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday result = com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday.forNumber( from); return result == null ? com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday .UNRECOGNIZED : result; } }; /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return A list containing the businessDays. 
*/ @java.lang.Override public java.util.List<com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday> getBusinessDaysList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday>( businessDays_, businessDays_converter_); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The count of businessDays. */ @java.lang.Override public int getBusinessDaysCount() { return businessDays_.size(); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param index The index of the element to return. * @return The businessDays at the given index. */ @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday getBusinessDays( int index) { return businessDays_converter_.convert(businessDays_.get(index)); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return A list containing the enum numeric values on the wire for businessDays. */ @java.lang.Override public java.util.List<java.lang.Integer> getBusinessDaysValueList() { return businessDays_; } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param index The index of the value to return. 
* @return The enum numeric value on the wire of businessDays at the given index. */ @java.lang.Override public int getBusinessDaysValue(int index) { return businessDays_.get(index); } private int businessDaysMemoizedSerializedSize; private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getBusinessDaysList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(businessDaysMemoizedSerializedSize); } for (int i = 0; i < businessDays_.size(); i++) { output.writeEnumNoTag(businessDays_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < businessDays_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(businessDays_.get(i)); } size += dataSize; if (!getBusinessDaysList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(dataSize); } businessDaysMemoizedSerializedSize = dataSize; } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig other = (com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig) obj; if (!businessDays_.equals(other.businessDays_)) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public 
int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getBusinessDaysCount() > 0) { hash = (37 * hash) + BUSINESS_DAYS_FIELD_NUMBER; hash = (53 * hash) + businessDays_.hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Business days of the warehouse. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.BusinessDayConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.BusinessDayConfig) com.google.shopping.merchant.accounts.v1beta.BusinessDayConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_BusinessDayConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_BusinessDayConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.class, com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Builder.class); } // Construct using com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; businessDays_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto 
.internal_static_google_shopping_merchant_accounts_v1beta_BusinessDayConfig_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig build() { com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig buildPartial() { com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig result = new com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig result) { if (((bitField0_ & 0x00000001) != 0)) { businessDays_ = java.util.Collections.unmodifiableList(businessDays_); bitField0_ = (bitField0_ & ~0x00000001); } result.businessDays_ = businessDays_; } private void buildPartial0( com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig) { return mergeFrom((com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig other) { if (other == com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.getDefaultInstance()) return this; if (!other.businessDays_.isEmpty()) { if (businessDays_.isEmpty()) { businessDays_ = other.businessDays_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureBusinessDaysIsMutable(); businessDays_.addAll(other.businessDays_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); ensureBusinessDaysIsMutable(); businessDays_.add(tmpRaw); break; } // case 8 case 10: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while (input.getBytesUntilLimit() > 0) { int tmpRaw = input.readEnum(); ensureBusinessDaysIsMutable(); businessDays_.add(tmpRaw); } input.popLimit(oldLimit); break; } // case 
10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<java.lang.Integer> businessDays_ = java.util.Collections.emptyList(); private void ensureBusinessDaysIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { businessDays_ = new java.util.ArrayList<java.lang.Integer>(businessDays_); bitField0_ |= 0x00000001; } } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return A list containing the businessDays. */ public java.util.List<com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday> getBusinessDaysList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday>( businessDays_, businessDays_converter_); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The count of businessDays. */ public int getBusinessDaysCount() { return businessDays_.size(); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param index The index of the element to return. * @return The businessDays at the given index. 
*/ public com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday getBusinessDays( int index) { return businessDays_converter_.convert(businessDays_.get(index)); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param index The index to set the value at. * @param value The businessDays to set. * @return This builder for chaining. */ public Builder setBusinessDays( int index, com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday value) { if (value == null) { throw new NullPointerException(); } ensureBusinessDaysIsMutable(); businessDays_.set(index, value.getNumber()); onChanged(); return this; } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The businessDays to add. * @return This builder for chaining. */ public Builder addBusinessDays( com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday value) { if (value == null) { throw new NullPointerException(); } ensureBusinessDaysIsMutable(); businessDays_.add(value.getNumber()); onChanged(); return this; } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param values The businessDays to add. * @return This builder for chaining. */ public Builder addAllBusinessDays( java.lang.Iterable< ? 
extends com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday> values) { ensureBusinessDaysIsMutable(); for (com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday value : values) { businessDays_.add(value.getNumber()); } onChanged(); return this; } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ public Builder clearBusinessDays() { businessDays_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return A list containing the enum numeric values on the wire for businessDays. */ public java.util.List<java.lang.Integer> getBusinessDaysValueList() { return java.util.Collections.unmodifiableList(businessDays_); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param index The index of the value to return. * @return The enum numeric value on the wire of businessDays at the given index. */ public int getBusinessDaysValue(int index) { return businessDays_.get(index); } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param index The index to set the value at. 
* @param value The enum numeric value on the wire for businessDays to set. * @return This builder for chaining. */ public Builder setBusinessDaysValue(int index, int value) { ensureBusinessDaysIsMutable(); businessDays_.set(index, value); onChanged(); return this; } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The enum numeric value on the wire for businessDays to add. * @return This builder for chaining. */ public Builder addBusinessDaysValue(int value) { ensureBusinessDaysIsMutable(); businessDays_.add(value); onChanged(); return this; } /** * * * <pre> * Required. Regular business days. * May not be empty. * </pre> * * <code> * repeated .google.shopping.merchant.accounts.v1beta.BusinessDayConfig.Weekday business_days = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param values The enum numeric values on the wire for businessDays to add. * @return This builder for chaining. 
*/ public Builder addAllBusinessDaysValue(java.lang.Iterable<java.lang.Integer> values) { ensureBusinessDaysIsMutable(); for (int value : values) { businessDays_.add(value); } onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.BusinessDayConfig) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.BusinessDayConfig) private static final com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig(); } public static com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BusinessDayConfig> PARSER = new com.google.protobuf.AbstractParser<BusinessDayConfig>() { @java.lang.Override public BusinessDayConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<BusinessDayConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BusinessDayConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.BusinessDayConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/solr
34,132
solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.cloud; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrInputDocument; import org.hamcrest.core.IsEqual; import org.hamcrest.core.IsIterableContaining; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SolrTestCaseJ4.SuppressSSL // Tests https://issues.apache.org/jira/browse/SOLR-13963 public class TestDynamicFieldNamesIndexCorrectly extends AbstractFullDistribZkTestBase { private static final String COLLECTION = "test"; private static final 
Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Test @BaseDistributedSearchTestCase.ShardsFixed(num = 3) public void test() throws Exception { waitForThingsToLevelOut(30, TimeUnit.SECONDS); createCollection(COLLECTION, "conf1", 4, 1); final int numRuns = 10; populateIndex(numRuns); } void populateIndex(int numRuns) throws IOException, SolrServerException { try { for (int i = 0; i < numRuns; i++) { log.debug("Iteration number: {}", i); cloudClient.deleteByQuery(COLLECTION, "*:*"); cloudClient.commit(COLLECTION); final Collection<SolrInputDocument> solrDocs = generateRandomizedFieldDocuments(); addToSolr(solrDocs); final SolrQuery solrQuery = new SolrQuery("*:*"); solrQuery.setRows(solrDocs.size()); final SolrDocumentList resultDocs = getSolrResponse(solrQuery, COLLECTION); log.debug("{}", resultDocs); assertThatDocsHaveCorrectFields(solrDocs, resultDocs); } } finally { cloudClient.close(); } } private void assertThatDocsHaveCorrectFields( final Collection<SolrInputDocument> solrDocs, final SolrDocumentList resultDocs) { assertEquals("Wrong number of docs found", resultDocs.getNumFound(), solrDocs.size()); final Map<Object, SolrDocument> resultMap = resultDocs.stream().collect(Collectors.toMap(doc -> doc.getFieldValue("id"), doc -> doc)); Iterator<SolrInputDocument> it = solrDocs.iterator(); while (it.hasNext()) { final SolrInputDocument inDoc = it.next(); final String id = inDoc.getField("id").getValue().toString(); final SolrDocument resultDoc = resultMap.get(id); final Collection<String> resultFieldNames = resultDoc.getFieldNames(); inDoc .getFieldNames() .forEach( fieldName -> { assertThat( String.format( Locale.ROOT, "Doc %s does not have field %s, it has %s", id, fieldName, resultFieldNames), resultFieldNames, new IsIterableContaining<>(new IsEqual<>(fieldName))); }); } } public SolrDocumentList getSolrResponse(SolrQuery solrQuery, String collection) throws SolrServerException, IOException { final QueryResponse response; 
SolrDocumentList list = null; final QueryRequest req = new QueryRequest(solrQuery); response = req.process(cloudClient, collection); list = response.getResults(); return list; } private void addToSolr(Collection<SolrInputDocument> solrDocs) throws IOException, SolrServerException { cloudClient.add(COLLECTION, solrDocs.iterator()); cloudClient.commit(COLLECTION); } public static List<SolrInputDocument> generateRandomizedFieldDocuments() { final List<SolrInputDocument> solrDocs = new ArrayList<>(); final Iterator<String> iterator = FIELD_NAMES.iterator(); int id = 0; while (iterator.hasNext()) { solrDocs.add(nextDoc(id++, iterator)); } return solrDocs; } public static SolrInputDocument nextDoc(int id, Iterator<String> iterator) { final SolrInputDocument solrDoc = new SolrInputDocument(); solrDoc.addField("id", id); final String nameField = iterator.next(); solrDoc.addField(nameField, "Somebody"); return solrDoc; } private static final List<String> FIELD_NAMES = Arrays.asList( new String[] { "name_DfsqCIYgwMpJnc_prop_s", "name_VHzHTZWnqGALJJ_prop_s", "name_OyKmIqynBbK_prop_s", "name_JofvOXUMYQs_prop_s", "name_SaAfmgHXbCIUethh_prop_s", "name_CMajAPNHivraqKBmYxH_prop_s", "name_OpJFcSZHuOFVKs_prop_s", "name_fTaolBrXTGpJ_prop_s", "name_hlgpuaRTRmYjBNmzHBI_prop_s", "name_DGSzgfeiMouuTgbaklJ_prop_s", "name_hTAZuAysueB_prop_s", "name_VqztpEqzBCXEhVM_prop_s", "name_CaJSsxLqxhq_prop_s", "name_JjEYNobdJiyAJ_prop_s", "name_GGpLbFvxdFyBH_prop_s", "name_NIfhcAmufHRwaGNuO_prop_s", "name_wRzKYNtwiUapyzjQh_prop_s", "name_UonaDljKBYUMgMV_prop_s", "name_sByosZWJLlrrFYVXaT_prop_s", "name_HKHToAtQQkPMwNyGr_prop_s", "name_HJBQHPKbxHvPGp_prop_s", "name_UtERukPiRHzqv_prop_s", "name_WIevbvmoKJkcr_prop_s", "name_YjoCtbikMRaY_prop_s", "name_OwuVrwcxslmiWMylkuH_prop_s", "name_eEoZobamQfJLad_prop_s", "name_IWkfNtxsTRbuPIT_prop_s", "name_rZphZcqVQN_prop_s", "name_QbePjDfrPkiUySUfSS_prop_s", "name_ABCPaNPQXBwVJh_prop_s", "name_OitLZpkeOXrOAeITlAc_prop_s", "name_GlGQselWNwuHUSPy_prop_s", 
"name_XDNBBpHaxD_prop_s", "name_NkSQtvNhCwgPxnuRGGK_prop_s", "name_mkYuyjFfWjEb_prop_s", "name_JUOzeuNelNHbQVlj_prop_s", "name_CuzbqxBlEJEnBdeJo_prop_s", "name_GbpIJAqoVP_prop_s", "name_oPozbuiwFXFoCQ_prop_s", "name_QPcamTHGpEgYGW_prop_s", "name_QfgfGrTZZkqIbLq_prop_s", "name_UtkepJfqAPQQZvDnB_prop_s", "name_ShipLvibadhd_prop_s", "name_wAdEXOEAydT_prop_s", "name_YiquTYZxxNsxanQ_prop_s", "name_hJfuWEBCYIdtcixldUy_prop_s", "name_PzYofpLhvtw_prop_s", "name_rhkJFHishBuS_prop_s", "name_GNUoUCaqqfGErM_prop_s", "name_hSrbCrBUEs_prop_s", "name_xJANZEGtTrIXMDLBgL_prop_s", "name_pOhSitCAKl_prop_s", "name_PkBHXUceEgVP_prop_s", "name_fvDrPKkegWr_prop_s", "name_HVzmAutUrUoicr_prop_s", "name_ouFhihsihDk_prop_s", "name_eeFcnImKkXiKXDTIPC_prop_s", "name_NMEsrYgSBoIEwp_prop_s", "name_yqCQGPzCamFqBwLZiiC_prop_s", "name_JlHlxPykBl_prop_s", "name_lYGskGWJfNhnd_prop_s", "name_ifXTlDnYqUmjFNhKOxq_prop_s", "name_uaCtJcjZWu_prop_s", "name_LzSXDKQdhQ_prop_s", "name_TpvZetClsYcJenPCdW_prop_s", "name_NPsQNyfkDCgNus_prop_s", "name_zMZnwFtVnbdlGncBEf_prop_s", "name_dGDCXTxABxh_prop_s", "name_JIOxBoRhiZLD_prop_s", "name_smVTZaCZZMiSmYq_prop_s", "name_VgCZTMfOHpfAlGUjDxT_prop_s", "name_HhtLeCOGJMNLMXFBgI_prop_s", "name_QpzFZXNIpk_prop_s", "name_obTfzXxBoCXpiGFGWuz_prop_s", "name_VrBTsQmfJoqNI_prop_s", "name_QeXnmsrvSYZBtkWwDxs_prop_s", "name_vtvvKPfpTBBBMuMTZZ_prop_s", "name_VvPvDbWJXsXIAUSNWgW_prop_s", "name_BYCAfIaRKVUvHHBIut_prop_s", "name_srwPMPauluyfyM_prop_s", "name_YlrFboTEUfq_prop_s", "name_vIPAkvspnnT_prop_s", "name_XWVkDyVpkZvo_prop_s", "name_tJDzyfWZtOrzwvuw_prop_s", "name_mvfaMcKLduLXcvol_prop_s", "name_OKvQYLTaCWwGTXDboK_prop_s", "name_VkMXjFZGUQgNWDbKbgp_prop_s", "name_IixctxAiJdqQQlPwV_prop_s", "name_LbOxzyxGVrsKyZgCHKi_prop_s", "name_YtJheZqyzPhpuAAitN_prop_s", "name_IsctRhBopyx_prop_s", "name_xrfxhlkidKabA_prop_s", "name_MFqGPFbIOrneplmaOK_prop_s", "name_fXOsAXXtMnLy_prop_s", "name_ATQmfQzgdOlFPuDp_prop_s", "name_rFrgtZZDVFGuHjteUX_prop_s", "name_qcPrtNSRKfBPvtdXWJJ_prop_s", 
"name_UpInzgFgMlfOuMuffOa_prop_s", "name_cmwSPLLLuiv_prop_s", "name_WDQjhkEHQabWvK_prop_s", "name_BqSJaaLDBTTVy_prop_s", "name_nqXaRkhFXV_prop_s", "name_GJBYZZXOOlyJ_prop_s", "name_khgXzOmSxxrerikblPC_prop_s", "name_uFNMtGvQQJljSgk_prop_s", "name_yoZRduwiqx_prop_s", "name_GqqWeEyYXEwT_prop_s", "name_tzhVSqoPKt_prop_s", "name_ensyGAXGQSuW_prop_s", "name_LQJmrvSWKQHc_prop_s", "name_KPpikIjkpciF_prop_s", "name_mplQAMNcigYEwNEBT_prop_s", "name_idmsrYlJGoizvsllQsW_prop_s", "name_rMPMEsrySqUVwcDaUE_prop_s", "name_febnQEKdThaqhnghZ_prop_s", "name_XxtOzKGvvSguMNS_prop_s", "name_VtFlQvelTPyz_prop_s", "name_PQYUOnhHJsSaqVDH_prop_s", "name_qQEIWMsRNQAV_prop_s", "name_rPPHpYLbLoUiLYQ_prop_s", "name_wZaRlynJFNvWJKjyyuA_prop_s", "name_sOwZhIRXUlCvaqRn_prop_s", "name_omkQRxJuYPLTeB_prop_s", "name_fVJbGrSpMpO_prop_s", "name_wLYQtojRTtWeQfz_prop_s", "name_dlQxbbzWoAEDbRPFy_prop_s", "name_SkYKoVihqWDXnsH_prop_s", "name_whlpGhuMeZA_prop_s", "name_iOsqSwnNKSNrjLmkpvo_prop_s", "name_dWYzrxvJttwv_prop_s", "name_stOcVzqQedeqagmynaG_prop_s", "name_NENunrnlQI_prop_s", "name_HqeTpJDHOsfpawjehIq_prop_s", "name_RPwyjltiltvDOqpsYi_prop_s", "name_znVAkUDVYWMIoLr_prop_s", "name_jTzTSvTRyguN_prop_s", "name_ySeOANIBnMabQvaru_prop_s", "name_SadaPaYJaxkwHkRMuE_prop_s", "name_JQVolDkiGeuvA_prop_s", "name_NtxjSaBccGJWoK_prop_s", "name_WvwitdcFXPUQny_prop_s", "name_JQGUUVnzyMCJs_prop_s", "name_GqDdyBcHznboeW_prop_s", "name_RlRSvAFEykA_prop_s", "name_TvNERqviFBnOCtemES_prop_s", "name_DUlAWwbaslagWbIImdd_prop_s", "name_gWILZCZRlbjBoQdrP_prop_s", "name_ftNrYHWFvhuGYHuJt_prop_s", "name_QYBKgeSLCQeRUX_prop_s", "name_PYUIqToJNDgWASGFr_prop_s", "name_zBZIhwwifmRTOXe_prop_s", "name_hnPUucMPfhUuJoO_prop_s", "name_agZLOYIoOWl_prop_s", "name_SEgmWAjhjJ_prop_s", "name_pUclNPUSiDZtMg_prop_s", "name_LjIrSIDJqoqL_prop_s", "name_vjHbgxEULpsQiZlUaM_prop_s", "name_eymEZtHNKYjWFEUlbR_prop_s", "name_tQmOnPEwkIMJlzPRG_prop_s", "name_ogsTpGUlFLOvLzl_prop_s", "name_jJfXDLSaOuHI_prop_s", "name_tBfQFKUYmaAeR_prop_s", 
"name_rzFgVahQrXezOIMy_prop_s", "name_qdjFkPulsPpMLXVPp_prop_s", "name_rWetgUNXaoxXIfbPDz_prop_s", "name_OrSAGeTkkrRUygOLG_prop_s", "name_LoeOnHUogQnvFHbvXCQ_prop_s", "name_wCbfoExoqlldz_prop_s", "name_mAyvGeccKbSpO_prop_s", "name_LAlRNXNqtwdF_prop_s", "name_CzQuGtKdZviLIh_prop_s", "name_pkfyyloJeQLCiclF_prop_s", "name_BBabvpGlueqCqEAJq_prop_s", "name_yMyCCNWJarW_prop_s", "name_rXyBgzPnWqnU_prop_s", "name_yjTcYotQfUVXVp_prop_s", "name_iQulShIGGjlJuGtkOk_prop_s", "name_EAMjjKBtOri_prop_s", "name_cKKMdfEVvOY_prop_s", "name_HCgMMUWJhAPUcSYEw_prop_s", "name_QxAiEPSPFcGdpbsAN_prop_s", "name_uRFDixdPAlsNiZ_prop_s", "name_ctffdxcrVBN_prop_s", "name_mdXIbwncmwHgDmfsiAM_prop_s", "name_gKlSaxAfDdYgt_prop_s", "name_juaOrDYjSfvcmkd_prop_s", "name_YadqjaxLPXUpJCIMdNm_prop_s", "name_jlNcOgAYUBoj_prop_s", "name_AKNbQWFRzzYbAhOlqAI_prop_s", "name_JAzAPnrljRhqbNfdoh_prop_s", "name_kXYgLRfqrYiQxRo_prop_s", "name_AfZylgVaZgvaIQgR_prop_s", "name_XaOBvJEVEw_prop_s", "name_hDwJONxcscyJuzYRH_prop_s", "name_SvogOicRPq_prop_s", "name_RIsXETbdCtBuL_prop_s", "name_jOxeorqpGcdkp_prop_s", "name_IBzKXorZDdowJujJkC_prop_s", "name_kWfsavjmSEIyGxeoz_prop_s", "name_DhaoVQSvJZfy_prop_s", "name_dWSNRommSreSW_prop_s", "name_LWqiEKFOMPVklmFwyoX_prop_s", "name_GVazWdylOnyamFiz_prop_s", "name_CcgGFeiwORNbAw_prop_s", "name_mVgxCpHfjhofqaOA_prop_s", "name_nzuAietKmfmjnXalz_prop_s", "name_YzYAGdOoaxwgSh_prop_s", "name_jLMshjzscpgU_prop_s", "name_JaLKPfULNIeWysimJf_prop_s", "name_KehwwGmAULqXtNCrwhX_prop_s", "name_mxpwZrDktTLXUzkdKa_prop_s", "name_bPmedbyCSSjDC_prop_s", "name_LYbCFtmQiC_prop_s", "name_cLrVLzwMcMnAT_prop_s", "name_HeOUpecBxVvHEERlPUk_prop_s", "name_jCVSgiNewmDB_prop_s", "name_jOLtAVRUFrs_prop_s", "name_gfWTsWEVeVSXwGMgUT_prop_s", "name_BPbiEWmyizADxNIV_prop_s", "name_VYSwGIOIarPmGWVKenS_prop_s", "name_QpAHpTcxrzVYYfWYT_prop_s", "name_tOBVdVTRBMmCXfnxrNa_prop_s", "name_iCaKxgfTXuvgCT_prop_s", "name_kdPWzVZHslaijNrKbKU_prop_s", "name_wmJmiiWghggUHmNiQAg_prop_s", "name_ZpzaQMGuMfOjw_prop_s", 
"name_cgOqMOeYMHJ_prop_s", "name_EnguvcJhre_prop_s", "name_edeevGMabTDek_prop_s", "name_vmJEHidWgTTUvioGhi_prop_s", "name_CHYfwnIHxQzPwEFJ_prop_s", "name_KXpUaenwfjlj_prop_s", "name_eVGHumUijQhFvzGjaV_prop_s", "name_XorPzBArSbSTCHpz_prop_s", "name_RRLESavujqcxblljkn_prop_s", "name_YftgbzYxUNUCMXt_prop_s", "name_IqEDQHVFGIyQSS_prop_s", "name_XbStVPkHwGYmQB_prop_s", "name_JyWCZhERjLOtqw_prop_s", "name_dDiuFMzjhrJGyqqud_prop_s", "name_uCCDpPtxkdQNDq_prop_s", "name_ohZQKMOVeb_prop_s", "name_gBTzxrPwsX_prop_s", "name_RLkUwPFSVjqB_prop_s", "name_CXlPWeBunQDGtBXqo_prop_s", "name_kGvCPheDzjir_prop_s", "name_cvcOAZkaTZsTyrrWxvQ_prop_s", "name_sftNHXiElgbUQxtYDI_prop_s", "name_aqGEmvTBCqdFKyfa_prop_s", "name_myCtzywMPzQyJhHwsEy_prop_s", "name_TmvkTzpWLtPEDUfmg_prop_s", "name_XnYvWLQJdcjdOBmfJ_prop_s", "name_toYeyORNQWA_prop_s", "name_hcWpqATuIiUbyfiHPaJ_prop_s", "name_EAelPZjFpiThB_prop_s", "name_aEfokIQMbKI_prop_s", "name_YMbTCeRRipELjF_prop_s", "name_yIbPmIvnUNFsKaEk_prop_s", "name_PVsusfJldMrTq_prop_s", "name_BhGnYInbCoBcRxbkh_prop_s", "name_LvywGWGeDmCnwYM_prop_s", "name_bJwdGFMfTyRhI_prop_s", "name_durkyUrNKHx_prop_s", "name_RdeZaAlmttQzNDZCb_prop_s", "name_VdzHkraZKezBjY_prop_s", "name_rAhOeyHbDuW_prop_s", "name_SNzylGssYOA_prop_s", "name_vHqZyqgwfD_prop_s", "name_DPnKKQlfkn_prop_s", "name_PQFtvTrPezVRLL_prop_s", "name_YkOCraZfkuCyx_prop_s", "name_glGgplQXQzqaHbT_prop_s", "name_OqpvyNHqeQUANE_prop_s", "name_EYRKsQekVHcYlWf_prop_s", "name_RFuZbCWIOu_prop_s", "name_ekHWLiTVyNjYdl_prop_s", "name_vezpACcbFw_prop_s", "name_oQQXcPzeODviDC_prop_s", "name_wZkyzXscqPGWiEzwR_prop_s", "name_eYywOQdxMbAwHNC_prop_s", "name_gvEXKFXEAQMaYm_prop_s", "name_vofoikKFpZsOZfY_prop_s", "name_aXNocadbQQO_prop_s", "name_pzzPuuliByDjLm_prop_s", "name_dIOSQFOVldP_prop_s", "name_sbplpizxCQWndsBpoU_prop_s", "name_uogQaerZVBnV_prop_s", "name_WsDhwfdJivmMKO_prop_s", "name_RjJjIrPGWGFgCbT_prop_s", "name_sKymsAbmFqwyzKRSH_prop_s", "name_wIHDafXfvOunVi_prop_s", "name_pWEWMRdqgvuGdqwztct_prop_s", 
"name_aFDHZXHKgnVo_prop_s", "name_dAdcQYTvmRZ_prop_s", "name_zQsaOcogPYNqypDPYjS_prop_s", "name_KOtJNECCHjLxKZqHZ_prop_s", "name_wfdxykXSBRcrfUv_prop_s", "name_kGJgFephxkeH_prop_s", "name_peispafiMLgmE_prop_s", "name_CJTnCuCsOSCvj_prop_s", "name_xpOyokirtcJoFPKyH_prop_s", "name_nhmhQePxBvNT_prop_s", "name_vPxdJTwHkzDdvaK_prop_s", "name_dAGyfZWSkTaCCt_prop_s", "name_CYaZJGFolJqNhmKgsV_prop_s", "name_vboqCHtthOPMRHU_prop_s", "name_fqrgYweKbBNzlYJk_prop_s", "name_SwOSQemwasu_prop_s", "name_dRDJlPUxSgvIS_prop_s", "name_DYjfbnkMhnMyL_prop_s", "name_REAirSXdUlsq_prop_s", "name_aPLpQwhWGCcjk_prop_s", "name_LWlbDafEriuRGmJYW_prop_s", "name_bTFLYGqAHYvnpFvzd_prop_s", "name_emIonaQRdfsjmVCjUn_prop_s", "name_RdMOfMWlqKmKuxYawG_prop_s", "name_NmvxkGBDyJ_prop_s", "name_veeKFlgaBqTXINdlbi_prop_s", "name_JEMSCgBWKwpd_prop_s", "name_RKsEwiClkYAENVkO_prop_s", "name_QSfmaqphip_prop_s", "name_DhcOPbKnWrv_prop_s", "name_AhEfQCMTWtrdjBV_prop_s", "name_EtAMDtJVTd_prop_s", "name_qVxNUttsduupj_prop_s", "name_BeFWYHBfnSNqVEPz_prop_s", "name_wPevXszAQZWZwe_prop_s", "name_oJYcnxrshAkjJYnXyn_prop_s", "name_nffSJxMrhrIlQw_prop_s", "name_ZrHpSfuzHHIin_prop_s", "name_rdMnHMmgEQaGLmXRPiD_prop_s", "name_huldbnqnXwop_prop_s", "name_jduhQpDoYv_prop_s", "name_NBOKEducirzNsSSy_prop_s", "name_xJzWfJrMIY_prop_s", "name_VMZbxqOHwfQDaGT_prop_s", "name_syUXJprVoLTZYebB_prop_s", "name_prZEUbNoTysB_prop_s", "name_RfvJRIoQeGSu_prop_s", "name_BBshBWkaLopZ_prop_s", "name_YOAVKRdkRspIVaLva_prop_s", "name_RrTctdPJnMoMw_prop_s", "name_TEoYvqSeBmaUHflB_prop_s", "name_IwsxROIVgJ_prop_s", "name_ktQwKjuCLYAmOnyj_prop_s", "name_MZrmJkYFkHsU_prop_s", "name_bdagQHBFmoIo_prop_s", "name_zmoxFeHMBwkyEO_prop_s", "name_wenNdlQvlHItqflx_prop_s", "name_XprqFpXiYoHzEfd_prop_s", "name_ogZQmtfQOfvP_prop_s", "name_QOsBJGNDUzbHWHrQ_prop_s", "name_jfrfWIuCWSFXQumtm_prop_s", "name_VFWIKhommZaTVuzphSb_prop_s", "name_RVhGwEvGjdOnzR_prop_s", "name_FQlxoQLZIkZCyfiVx_prop_s", "name_MPbQJjgBGMUR_prop_s", "name_SbbTGVASSkYHiNwV_prop_s", 
"name_MntYiMNrHQ_prop_s", "name_yjcZRVwITRLXb_prop_s", "name_aSKYqqhexuo_prop_s", "name_TfzoLKDlIhDun_prop_s", "name_KeKTrXfMFglbN_prop_s", "name_iIdfUsKoIlf_prop_s", "name_FPQqtNlVCLSgwgNhf_prop_s", "name_PkYUzUADmq_prop_s", "name_nXAJwIhWfESKdZ_prop_s", "name_faXLvuLCiq_prop_s", "name_zarHYCyYIr_prop_s", "name_sowzONSDytjGEZuv_prop_s", "name_zyWCVstnSnLz_prop_s", "name_anncXfqvveOWy_prop_s", "name_TbvIhvzhkLAXm_prop_s", "name_tBWzDGmZocLjPRFMIF_prop_s", "name_JgCrqPcPNiVdrRRbf_prop_s", "name_FBtKmopbwHOPPoMjDRA_prop_s", "name_BOEyOhYKOUSQFQPxwDL_prop_s", "name_uVosPVYbIF_prop_s", "name_eQOiKlnUNZ_prop_s", "name_lYYQBjpaIjMXYRH_prop_s", "name_FyFvEcZfRrnx_prop_s", "name_rNiSOAGXkMPBY_prop_s", "name_tylcSBADvLvAKkzv_prop_s", "name_KvoxbuKdiqLGUo_prop_s", "name_FDZfmbIjXBiKoeWImxj_prop_s", "name_NULbsIjjyysWdXGAxy_prop_s", "name_RVtYeHUXaxVSBJUCX_prop_s", "name_jlNFgVZgDAFKqHxR_prop_s", "name_uIhSJwItLLKHa_prop_s", "name_lEMFtKhGZjrjnLlW_prop_s", "name_avEoREwfXmm_prop_s", "name_IiXRqkZmvNAqf_prop_s", "name_dKzqqsjZzTgxHTpZiA_prop_s", "name_jilMmwVsaTkUgJ_prop_s", "name_xYNTFgaEEluQ_prop_s", "name_WFkNIiGzzfHous_prop_s", "name_ztXfmQXTTNuXjPSCYC_prop_s", "name_jyGvFWOSfs_prop_s", "name_jRpEJIPQzYKLR_prop_s", "name_FIUqxuPiWpMMTuZ_prop_s", "name_ttkqBQpFtwHL_prop_s", "name_bqYmgceeoJZSZbW_prop_s", "name_ctRkHATHrFlnEKmSRLd_prop_s", "name_wZorXwBeanELgv_prop_s", "name_jXiyBDjpCKe_prop_s", "name_sRvLkwUSBIsrt_prop_s", "name_yEHNabvaqyAGa_prop_s", "name_cwmgaKpzluwJOBvphxY_prop_s", "name_cOXSTpgjzFEjfbJPVM_prop_s", "name_ikkFRyBgGfWbg_prop_s", "name_dEKLFEgvjHFo_prop_s", "name_HJZRtrGjmPlc_prop_s", "name_hMpazPhQVkTUE_prop_s", "name_VKnOJLBqMVzkxD_prop_s", "name_zKPBHVcuULlMTRy_prop_s", "name_LzbMOhdcPnvcF_prop_s", "name_euHYSgnsustyR_prop_s", "name_IvuYSeiYicgpmboJW_prop_s", "name_yGrlGoiNHNIt_prop_s", "name_tpDceZWQvat_prop_s", "name_iaDXoHUSwG_prop_s", "name_fJXmNNxUHggajGl_prop_s", "name_qdzxqokVXHjNBORhW_prop_s", "name_DxoLvhVEbDcXb_prop_s", 
"name_bFHhHakPJd_prop_s", "name_hVrFxShinIeN_prop_s", "name_XKPhskHDDg_prop_s", "name_JjbLlVDrWA_prop_s", "name_xOJcUebWcopYLGKGYhH_prop_s", "name_VJTvLToaSyFUm_prop_s", "name_civISGYkrfwD_prop_s", "name_kSPizRJqJZ_prop_s", "name_gmmUBdiHNFVBzpqukdi_prop_s", "name_jSGXVJsJPmESYy_prop_s", "name_AbyytYHuJyn_prop_s", "name_YGNtCMfmLqE_prop_s", "name_siCxrMEiFjwoEqfcc_prop_s", "name_yWlyMAenZiTylpYzW_prop_s", "name_XWOZYkmhzHmOF_prop_s", "name_FlCjaUETSllVHEwmoR_prop_s", "name_ZaXOAZXrKGXs_prop_s", "name_wveujGHeUQ_prop_s", "name_KhSPQFkCmHuScj_prop_s", "name_cBXYezKthhDfoVOnIo_prop_s", "name_rOVAKNTsPprlUDDlCa_prop_s", "name_fgWaLCfjuDnbH_prop_s", "name_ekxAMazIGJgLCCMox_prop_s", "name_iCbNCfPSYKZ_prop_s", "name_rULXErnmZoIMARdsEL_prop_s", "name_MjtGLUmEVFFRKydbJ_prop_s", "name_DzLQfXBPWppyPjj_prop_s", "name_xxNOkzscmZ_prop_s", "name_VAiCBAZUeEnA_prop_s", "name_ftdPuTtNtpLoRmtqQB_prop_s", "name_ebNmBmAGnjhDwEMkWN_prop_s", "name_eZVGYMBDaN_prop_s", "name_hxykcxgsIAfxfupix_prop_s", "name_XEDImtbSKXAeLyEop_prop_s", "name_yOxGFWeePpUIc_prop_s", "name_RzqLTLciLlaundr_prop_s", "name_UtCQadSTlNF_prop_s", "name_ORSaWMOVQhZZWxkv_prop_s", "name_qCgQYTeGGSJf_prop_s", "name_AlIZOvRFcZPbZwU_prop_s", "name_vdqdlYetlciyb_prop_s", "name_dmJIAXeXYjJhwacpkLZ_prop_s", "name_mCOjAATZrgxJ_prop_s", "name_RJsQfzfqbZGXp_prop_s", "name_XMiImCbTVJAoKSfEo_prop_s", "name_kDCCVcALrCx_prop_s", "name_VmkGYGugHqaA_prop_s", "name_jvZilzavGvyq_prop_s", "name_CCDtRrXmOTmc_prop_s", "name_UGbllGSvifotji_prop_s", "name_JOfVgyuwzbIriJg_prop_s", "name_cJCGLUbaZcrJXGCcZyE_prop_s", "name_yKXkqdoNhbkSPSBUv_prop_s", "name_QSrzBIUBQVUrdzM_prop_s", "name_ulgjGcvaqh_prop_s", "name_JaQtXbimGQW_prop_s", "name_xYrQHDXvVbzq_prop_s", "name_wSxZHthLVwKjuBWR_prop_s", "name_mEefJyzMBqdSbQ_prop_s", "name_GGJivsaoxiirx_prop_s", "name_CACALOHPQCrf_prop_s", "name_GBrQDvusDOWuvClhYa_prop_s", "name_vqZfUUBIkd_prop_s", "name_mXGYvfrccKBFymNB_prop_s", "name_wZhiLMSbHcweTy_prop_s", "name_fFPlXgVZKVHosY_prop_s", 
"name_wAFjlOGjIQJBOBgsg_prop_s", "name_diTnXDoUYaBiVnc_prop_s", "name_DyufnSBeLVPwDSPBi_prop_s", "name_TlGahXVDZeZdT_prop_s", "name_jDnUlzuoxtWKe_prop_s", "name_MCnYrKrvAa_prop_s", "name_HZtNEzgsgQpgPULw_prop_s", "name_sZZJIdHfiEnPvbgdoK_prop_s", "name_aehzQLgzPf_prop_s", "name_uOhtALYSlV_prop_s", "name_tLmeLHpBwzP_prop_s", "name_tMDTGUzelwUQrqPaN_prop_s", "name_wMUhdfLFRaXOOLeKL_prop_s", "name_XqpDZerjeDqrzzhsw_prop_s", "name_zrmxpkEbOGPIEzqM_prop_s", "name_dEOcHvQShe_prop_s", "name_QbZyQMReoJJG_prop_s", "name_gsYPhgkPfTOiPDEAVD_prop_s", "name_rbNswYkvBvqmA_prop_s", "name_LNmSBXRpRnvKFpqbT_prop_s", "name_rHuaoaqVkaAz_prop_s", "name_zjeWWuFoacJuuxETiD_prop_s", "name_BhAKoPqFSVlA_prop_s", "name_JjjcumppysyXsTldO_prop_s", "name_bMjIQaLeLZ_prop_s", "name_ujEVEGDoYpXmg_prop_s", "name_xeFvZrHvmONeM_prop_s", "name_vgNWlNzSOGo_prop_s", "name_AvdOuoFmghMsCklVua_prop_s", "name_KWDQpWtFvwxJWNNj_prop_s", "name_llVSmiZlLiNdippBgzm_prop_s", "name_BLmEHGblGXQHbywh_prop_s", "name_rfoqACJQVUHBuJZDjdx_prop_s", "name_kDQGbSJbyD_prop_s", "name_PLSTfOtIQx_prop_s", "name_NiIIwJLdfGlAcwzfT_prop_s", "name_kPGazwKspZmewuiaZVB_prop_s", "name_cuhoZBMdAMi_prop_s", "name_XbZqGnehJGT_prop_s", "name_yfCAGBvZufEM_prop_s", "name_GHuXtHZtwTY_prop_s", "name_jFtdTtkJbvHrsgGQ_prop_s", "name_iBcSefLjrrEyHOfqpx_prop_s", "name_GBJRIYHEbuwJmzLdxtm_prop_s", "name_VPiXQrwycQPT_prop_s", "name_XJstMKshDibmiHoZMd_prop_s", "name_wiGBycapxeIXtTrvW_prop_s", "name_rJPHaUEbgraQ_prop_s", "name_rGxylqGVHinLjO_prop_s", "name_GXeVgdEWBmv_prop_s", "name_HnYKYhHxZlpGIwdIVQ_prop_s", "name_FIOSdBvncmSeMiH_prop_s", "name_FCOLTVOghkVRBXhh_prop_s", "name_iZknWYaTKn_prop_s", "name_bQhwLkthwP_prop_s", "name_GJKLUOxgFtxMdbpeN_prop_s", "name_uCUdhLIXQKheDpQMB_prop_s", "name_knArOLgcybDsJsor_prop_s", "name_vgoNwqvzshUKeOPUSYk_prop_s", "name_YzIaNlWjqBqwoJcA_prop_s", "name_hDYFmiHwhPCL_prop_s", "name_fEAcVIqAfAIXehyOoGU_prop_s", "name_KwUSxCHFWiXOTqk_prop_s", "name_KRUSuEYGaQgWJmnGm_prop_s", "name_PpWwLjvaGoR_prop_s", 
"name_skVILQlxWYQowRGw_prop_s", "name_bcbBLimvTIGQp_prop_s", "name_vYQrLudbiua_prop_s", "name_nuDloTTlKFpeoV_prop_s", "name_RhbixfcpVSMOPfK_prop_s", "name_fRRDlXHyOAGhwJ_prop_s", "name_PGTPucoCVbz_prop_s", "name_TTOIQLLAUIMUqE_prop_s", "name_kXJQwDYAdc_prop_s", "name_VlYMFsIAfv_prop_s", "name_OThsmraSBTydoPfu_prop_s", "name_WhEccUbWgvObJoS_prop_s", "name_bxJtNPHBleHNhfat_prop_s", "name_aLJcfxHporPCXBiF_prop_s", "name_BbBwSzFKovNubMsv_prop_s", "name_ZoaCLmepYLkTCLddGPn_prop_s", "name_jYflHPNvrnzB_prop_s", "name_SGqftBmurcbCEMn_prop_s", "name_PqiMioFAtKOjkan_prop_s", "name_ZeazKbMtVMB_prop_s", "name_sgQyAUHsEg_prop_s", "name_EAIUmQCWbiQbZI_prop_s", "name_FNcVUavfHz_prop_s", "name_ViUmtAvjlwKCeFb_prop_s", "name_FYjubApKwXxQnNUIxB_prop_s", "name_WLPEmGTQAisfXsq_prop_s", "name_CyrnsHyuyFBx_prop_s", "name_zMGfDpWzqfZMAF_prop_s", "name_NILxzDPIbmoxOwQtuQP_prop_s", "name_JJCEpGqGVjJa_prop_s", "name_CtTFvRpyzKguMdZ_prop_s", "name_qiGhKGSMzMMp_prop_s", "name_QLUJBWXryHb_prop_s", "name_sMJePABydcVoQk_prop_s", "name_tfpbMNRLaXuyLuexLGy_prop_s", "name_rYoMoMLacxWlS_prop_s", "name_vWDCkyzmEi_prop_s", "name_RkKjeQtYycWC_prop_s", "name_xfDfirUchdkxKIDJOt_prop_s", "name_mEWCBmdvyhON_prop_s", "name_uLtsxsjXOGQZkCChL_prop_s", "name_UYjWVNCvGE_prop_s", "name_JJxhmSNcmsN_prop_s", "name_fYqlzMmhQdoecsvx_prop_s", "name_MxXoomSYegfmoEy_prop_s", "name_hKITNVMXrrjaeFpwfh_prop_s", "name_bhTKjWsdWDdonwi_prop_s", "name_XWjLvIfzoorQRqBmo_prop_s", "name_UqLAinOoswSeBVh_prop_s", "name_mQzjXAidhWpqqG_prop_s", "name_ytxaqwLBrvJYolqi_prop_s", "name_daTgAYVVJQsmO_prop_s", "name_xCmENbUDoiZ_prop_s", "name_eZTpxzkHHLjKUGuV_prop_s", "name_XdJsjHWRNMnQeC_prop_s", "name_tTSOfpdJTOsZkcTH_prop_s", "name_ridXaoCaPNoyFx_prop_s", "name_HVIFmePdpnAcvjba_prop_s", "name_osQVkiJtkHiBVP_prop_s", "name_ikTrXQFmMpAw_prop_s", "name_CtPYdlsrBtsuRkU_prop_s", "name_BbnERLXULZsX_prop_s", "name_FUGsEWgJtiLxWUEadSE_prop_s", "name_babUPIRWxOJTyQqt_prop_s", "name_zqaORMkAJlhSf_prop_s", "name_CeRKgIekQl_prop_s", 
"name_sHuCaTJIqfPYqpDILZe_prop_s", "name_wMsJtSzGDCJ_prop_s", "name_NprXcFInRsRGK_prop_s", "name_kruVqZBPAizaB_prop_s", "name_OJaYkRoxWwARAGa_prop_s", "name_fQeYEMbbBnnmbwS_prop_s", "name_jHwrTEPSNe_prop_s", "name_tGtgZLRbdYQHqFyI_prop_s", "name_bYUODaraQABQMuiVwa_prop_s", "name_LsdkDDyTgtLnQv_prop_s", "name_WmBvbHCQqNznHXDM_prop_s", "name_yCpJZfnNvJt_prop_s", "name_nxEaZdhiNOaCgHXu_prop_s", "name_YlsRbOaHrwrjw_prop_s", "name_wEzIAxJlGY_prop_s", "name_wgtEQdJDFUZMRCtKuvN_prop_s", "name_NlrMACPMAY_prop_s", "name_lyJIPhQYMXgUIOe_prop_s", "name_XDMUiHILIfVcRVS_prop_s", "name_CReyJWfRLOR_prop_s", "name_AySGHgndHRfNrHYs_prop_s", "name_vMKLAoTfxxBNIVC_prop_s", "name_UiEpdEsyrJWBVZN_prop_s", "name_ZDESHNBkigMNhIdqjqB_prop_s", "name_MeDLRbvcZrLgrXD_prop_s", "name_wtkpdHkreDpFK_prop_s", "name_fdKDEadJGWkIhpT_prop_s", "name_ozeAMJPgTwwzrTmu_prop_s", "name_CNivtYVLtjVlr_prop_s", "name_yglTIePAOb_prop_s", "name_UTRKTVkvhpJKEE_prop_s", "name_OmHylNTQXDRUKEC_prop_s", "name_JiZnnChtUFMUrGi_prop_s", "name_WoCxWZkHoaQu_prop_s", "name_AnNVbPPNuzjqFnL_prop_s", "name_kLXLsnBnOoySgS_prop_s", "name_UhbzdIMuOFGDaNiXEv_prop_s", "name_eWOWltJaJILIzCH_prop_s", "name_AIMKZYfLAHIs_prop_s", "name_pDzYoeEDPjsvqJ_prop_s", "name_eOACrLtTfxoyRlU_prop_s", "name_WauBOgBeapqDugJyyp_prop_s", "name_uwzXxeCxlcsKrNwpPkm_prop_s", "name_zZYjhAOxmRWjICXyd_prop_s", "name_jyeCWKaQnlrYHkzwSH_prop_s", "name_SesSMUttyVjUJaGKX_prop_s", "name_HBOChmtthCl_prop_s", "name_CxlLbdpOOfXwL_prop_s", "name_MiFBPgcnSSYFJdyju_prop_s", "name_rKEAVEpJXKWbRYM_prop_s", "name_xLQKEwIRCsGTqWzRf_prop_s" }); }
apache/dolphinscheduler
35,502
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.service.process; import static java.util.stream.Collectors.toSet; import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_SUB_WORKFLOW_DEFINITION_CODE; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.TimeoutFlag; import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.Cluster; import org.apache.dolphinscheduler.dao.entity.DagData; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import 
org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition; import org.apache.dolphinscheduler.dao.entity.WorkflowDefinitionLog; import org.apache.dolphinscheduler.dao.entity.WorkflowInstance; import org.apache.dolphinscheduler.dao.entity.WorkflowInstanceRelation; import org.apache.dolphinscheduler.dao.entity.WorkflowTaskRelation; import org.apache.dolphinscheduler.dao.entity.WorkflowTaskRelationLog; import org.apache.dolphinscheduler.dao.mapper.ClusterMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowInstanceRelationMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowTaskRelationLogMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowTaskRelationMapper; import org.apache.dolphinscheduler.dao.repository.TaskDefinitionDao; import org.apache.dolphinscheduler.dao.repository.TaskDefinitionLogDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceDao; import org.apache.dolphinscheduler.plugin.task.api.parameters.SubWorkflowParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.TaskTimeoutParameter; import org.apache.dolphinscheduler.service.expand.CuringParamsService; import 
org.apache.dolphinscheduler.service.model.TaskNode; import org.apache.dolphinscheduler.service.utils.ClusterConfUtils; import org.apache.dolphinscheduler.service.utils.DagHelper; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Strings; import com.google.common.collect.Lists; /** * process relative dao that some mappers in this. 
*/ @Component @Slf4j public class ProcessServiceImpl implements ProcessService { @Autowired private UserMapper userMapper; @Autowired private WorkflowDefinitionMapper workflowDefinitionMapper; @Autowired private WorkflowDefinitionLogMapper workflowDefinitionLogMapper; // todo replace with workflowInstanceDao @Autowired private WorkflowInstanceMapper workflowInstanceMapper; @Autowired private WorkflowInstanceDao workflowInstanceDao; @Autowired private TaskDefinitionDao taskDefinitionDao; @Autowired private TaskInstanceDao taskInstanceDao; @Autowired private TaskDefinitionLogDao taskDefinitionLogDao; @Autowired private DataSourceMapper dataSourceMapper; @Autowired private WorkflowInstanceRelationMapper workflowInstanceRelationMapper; @Autowired private ScheduleMapper scheduleMapper; @Autowired private TenantMapper tenantMapper; @Autowired private TaskDefinitionMapper taskDefinitionMapper; @Autowired private TaskDefinitionLogMapper taskDefinitionLogMapper; @Autowired private WorkflowTaskRelationMapper workflowTaskRelationMapper; @Autowired private WorkflowTaskRelationLogMapper workflowTaskRelationLogMapper; @Autowired private ClusterMapper clusterMapper; @Autowired private CuringParamsService curingGlobalParamsService; /** * find workflow instance detail by id * * @param workflowInstanceId workflowInstanceId * @return workflow instance */ @Override public Optional<WorkflowInstance> findWorkflowInstanceDetailById(int workflowInstanceId) { return Optional.ofNullable(workflowInstanceMapper.queryDetailById(workflowInstanceId)); } /** * find workflow instance by id * * @param workflowInstanceId workflowInstanceId * @return workflow instance */ @Override public WorkflowInstance findWorkflowInstanceById(int workflowInstanceId) { return workflowInstanceMapper.selectById(workflowInstanceId); } /** * find workflow define by code and version. 
* * @param workflowDefinitionCode workflowDefinitionCode * @return workflow definition */ @Override public WorkflowDefinition findWorkflowDefinition(Long workflowDefinitionCode, int workflowDefinitionVersion) { WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinitionCode); if (workflowDefinition == null || workflowDefinition.getVersion() != workflowDefinitionVersion) { workflowDefinition = workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion(workflowDefinitionCode, workflowDefinitionVersion); if (workflowDefinition != null) { workflowDefinition.setId(0); } } return workflowDefinition; } /** * delete work workflow instance by id * * @param workflowInstanceId workflowInstanceId * @return delete workflow instance result */ @Override public int deleteWorkflowInstanceById(int workflowInstanceId) { return workflowInstanceMapper.deleteById(workflowInstanceId); } /** * recursive query sub workflow definition id by parent id. * * @param parentCode parentCode */ @Override public List<Long> findAllSubWorkflowDefinitionCode(long parentCode) { List<TaskDefinition> taskNodeList = taskDefinitionDao.getTaskDefinitionListByDefinition(parentCode); if (CollectionUtils.isEmpty(taskNodeList)) { return Collections.emptyList(); } List<Long> subWorkflowDefinitionCodes = new ArrayList<>(); for (TaskDefinition taskNode : taskNodeList) { String parameter = taskNode.getTaskParams(); ObjectNode parameterJson = JSONUtils.parseObject(parameter); if (parameterJson.get(CMD_PARAM_SUB_WORKFLOW_DEFINITION_CODE) != null) { SubWorkflowParameters subProcessParam = JSONUtils.parseObject(parameter, SubWorkflowParameters.class); long subWorkflowDefinitionCode = subProcessParam.getWorkflowDefinitionCode(); subWorkflowDefinitionCodes.add(subWorkflowDefinitionCode); subWorkflowDefinitionCodes.addAll(findAllSubWorkflowDefinitionCode(subWorkflowDefinitionCode)); } } return subWorkflowDefinitionCodes; } /** * Get workflow runtime tenant * <p> * the workflow provides 
a tenant and uses the provided tenant; * when no tenant is provided or the provided tenant is the default tenant, \ * the user's tenant created by the workflow is used * * @param tenantCode tenantCode * @param userId userId * @return tenant code */ @Override public String getTenantForWorkflow(String tenantCode, int userId) { if (StringUtils.isNoneBlank(tenantCode) && !Constants.DEFAULT.equals(tenantCode)) { return tenantCode; } if (userId == 0) { return null; } User user = userMapper.selectById(userId); Tenant tenant = tenantMapper.queryById(user.getTenantId()); return tenant.getTenantCode(); } /** * find sub workflow instance * * @param parentWorkflowInstanceId parentWorkflowInstanceId * @param parentTaskId parentTaskId * @return workflow instance */ @Override public WorkflowInstance findSubWorkflowInstance(Integer parentWorkflowInstanceId, Integer parentTaskId) { WorkflowInstance workflowInstance = null; WorkflowInstanceRelation workflowInstanceRelation = workflowInstanceRelationMapper.queryByParentId(parentWorkflowInstanceId, parentTaskId); if (workflowInstanceRelation == null || workflowInstanceRelation.getWorkflowInstanceId() == 0) { return workflowInstance; } workflowInstance = findWorkflowInstanceById(workflowInstanceRelation.getWorkflowInstanceId()); return workflowInstance; } /** * find parent workflow instance * * @param subWorkflowInstanceId subWorkflowId * @return workflow instance */ @Override public WorkflowInstance findParentWorkflowInstance(Integer subWorkflowInstanceId) { WorkflowInstance workflowInstance = null; WorkflowInstanceRelation workflowInstanceRelation = workflowInstanceRelationMapper.queryBySubWorkflowId(subWorkflowInstanceId); if (workflowInstanceRelation == null || workflowInstanceRelation.getWorkflowInstanceId() == 0) { return workflowInstance; } workflowInstance = findWorkflowInstanceById(workflowInstanceRelation.getParentWorkflowInstanceId()); return workflowInstance; } /** * query Schedule by workflowDefinitionCode * * @param 
workflowDefinitionCode workflowDefinitionCode * @see Schedule */ @Override public List<Schedule> queryReleaseSchedulerListByWorkflowDefinitionCode(long workflowDefinitionCode) { return scheduleMapper.queryReleaseSchedulerListByWorkflowDefinitionCode(workflowDefinitionCode); } /** * find data source by id * * @param id id * @return datasource */ @Override public DataSource findDataSourceById(int id) { return dataSourceMapper.selectById(id); } /** * get user by user id * * @param userId user id * @return User */ @Override public User getUserById(int userId) { return userMapper.selectById(userId); } /** * list unauthorized * * @param userId user id * @param needChecks data source id array * @return unauthorized */ @Override public <T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) { List<T> resultList = new ArrayList<>(); if (Objects.nonNull(needChecks) && needChecks.length > 0) { Set<T> originResSet = new HashSet<>(Arrays.asList(needChecks)); switch (authorizationType) { case DATASOURCE: Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId, needChecks) .stream().map(DataSource::getId).collect(toSet()); originResSet.removeAll(authorizedDatasources); break; default: break; } resultList.addAll(originResSet); } return resultList; } /** * switch workflow definition version to workflow definition log version */ @Override public int switchVersion(WorkflowDefinition workflowDefinition, WorkflowDefinitionLog workflowDefinitionLog) { if (null == workflowDefinition || null == workflowDefinitionLog) { return Constants.DEFINITION_FAILURE; } workflowDefinitionLog.setId(workflowDefinition.getId()); workflowDefinitionLog.setReleaseState(ReleaseState.OFFLINE); workflowDefinitionLog.setFlag(Flag.YES); int result = workflowDefinitionMapper.updateById(workflowDefinitionLog); if (result > 0) { result = switchWorkflowTaskRelationVersion(workflowDefinitionLog); if (result <= 0) { return Constants.EXIT_CODE_FAILURE; 
} } return result; } // NOTE(review): closing braces/return of a method that starts before this chunk; left untouched.

    /**
     * Switches the live task-relation rows of the given workflow definition to the
     * relation set recorded in the relation-log table for the definition's current version.
     * Existing relations for the definition are deleted first, then the logged set is
     * re-inserted, and each post-task is switched to its logged task-definition version.
     *
     * @param workflowDefinition workflow definition whose code/version select the logged relations
     * @return EXIT_CODE_FAILURE if the batch insert affected no rows; otherwise the number of
     *         post-tasks whose definition version was switched without failure
     */
    @Override
    public int switchWorkflowTaskRelationVersion(WorkflowDefinition workflowDefinition) {
        List<WorkflowTaskRelation> workflowTaskRelationList =
                workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode());
        if (!workflowTaskRelationList.isEmpty()) {
            // Clear current relations before re-inserting the logged snapshot.
            workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(workflowDefinition.getProjectCode(),
                    workflowDefinition.getCode());
        }
        List<WorkflowTaskRelation> workflowTaskRelationListFromLog = workflowTaskRelationLogMapper
                .queryByWorkflowCodeAndVersion(workflowDefinition.getCode(), workflowDefinition.getVersion()).stream()
                .map(WorkflowTaskRelation::new).collect(Collectors.toList());
        int batchInsert = workflowTaskRelationMapper.batchInsert(workflowTaskRelationListFromLog);
        if (batchInsert == 0) {
            return Constants.EXIT_CODE_FAILURE;
        } else {
            int result = 0;
            // Align each post-task's definition with the version captured in the relation log.
            for (WorkflowTaskRelation taskRelation : workflowTaskRelationListFromLog) {
                int switchResult = switchTaskDefinitionVersion(taskRelation.getPostTaskCode(),
                        taskRelation.getPostTaskVersion());
                if (switchResult != Constants.EXIT_CODE_FAILURE) {
                    result++;
                }
            }
            return result;
        }
    }

    /**
     * Switches a task definition to the given logged version by copying the logged row
     * over the live row (same id, fresh update time).
     *
     * @param taskCode    code of the task definition to switch
     * @param taskVersion target version to switch to
     * @return EXIT_CODE_FAILURE if the task or the logged version does not exist;
     *         EXIT_CODE_SUCCESS if the task is already at that version;
     *         otherwise the mapper's update row count
     */
    @Override
    public int switchTaskDefinitionVersion(long taskCode, int taskVersion) {
        TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
        if (taskDefinition == null) {
            return Constants.EXIT_CODE_FAILURE;
        }
        if (taskDefinition.getVersion() == taskVersion) {
            // Already on the requested version; nothing to do.
            return Constants.EXIT_CODE_SUCCESS;
        }
        TaskDefinitionLog taskDefinitionUpdate =
                taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskCode, taskVersion);
        if (taskDefinitionUpdate == null) {
            return Constants.EXIT_CODE_FAILURE;
        }
        taskDefinitionUpdate.setUpdateTime(new Date());
        // Reuse the live row's primary key so updateById overwrites it in place.
        taskDefinitionUpdate.setId(taskDefinition.getId());
        return taskDefinitionMapper.updateById(taskDefinitionUpdate);
    }

    /**
     * Saves task definitions: every log entry gets a new version row in the log table;
     * when {@code syncDefine} is true the live task-definition table is also inserted/updated.
     * New definitions (no matching code+version in the log) get code/first-version assigned;
     * changed definitions get max-version+1 and keep the original creator/create time.
     *
     * @param operator           user performing the save (recorded as operator, and as creator for new rows)
     * @param projectCode        project the definitions belong to
     * @param taskDefinitionLogs definitions to save (mutated in place: code, version, timestamps, user ids)
     * @param syncDefine         when TRUE, also sync the live task-definition table
     * @return 1 when both the log insert count and the live update count are "positive" under
     *         bitwise AND, otherwise EXIT_CODE_SUCCESS
     *         NOTE(review): {@code (insertResult & updateResult) > 0} is a bitwise AND of two row
     *         counts — e.g. 2 &amp; 1 == 0 — so a fully successful save can fall into the else branch;
     *         presumably {@code &&} of two {@code > 0} checks was intended — confirm upstream.
     */
    @Override
    public int saveTaskDefine(User operator, long projectCode, List<TaskDefinitionLog> taskDefinitionLogs,
                              Boolean syncDefine) {
        Date now = new Date();
        List<TaskDefinitionLog> newTaskDefinitionLogs = new ArrayList<>();
        List<TaskDefinitionLog> updateTaskDefinitionLogs = new ArrayList<>();
        for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
            taskDefinitionLog.setProjectCode(projectCode);
            taskDefinitionLog.setUpdateTime(now);
            taskDefinitionLog.setOperateTime(now);
            taskDefinitionLog.setOperator(operator.getId());
            if (taskDefinitionLog.getCode() == 0) {
                // Brand-new definition: generate a unique code.
                taskDefinitionLog.setCode(CodeGenerateUtils.genCode());
            }
            if (taskDefinitionLog.getVersion() == 0) {
                // init first version
                taskDefinitionLog.setVersion(Constants.VERSION_FIRST);
            }
            TaskDefinitionLog definitionCodeAndVersion = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(
                    taskDefinitionLog.getCode(), taskDefinitionLog.getVersion());
            if (definitionCodeAndVersion == null) {
                // No logged row for this code+version: treat as a new definition.
                taskDefinitionLog.setUserId(operator.getId());
                taskDefinitionLog.setCreateTime(now);
                newTaskDefinitionLogs.add(taskDefinitionLog);
                continue;
            }
            if (taskDefinitionLog.equals(definitionCodeAndVersion)) {
                // do nothing if equals
                continue;
            }
            // Changed definition: keep original creator/create-time, bump to max version + 1.
            taskDefinitionLog.setUserId(definitionCodeAndVersion.getUserId());
            Integer version = taskDefinitionLogMapper.queryMaxVersionForDefinition(taskDefinitionLog.getCode());
            taskDefinitionLog.setVersion(version + 1);
            taskDefinitionLog.setCreateTime(definitionCodeAndVersion.getCreateTime());
            updateTaskDefinitionLogs.add(taskDefinitionLog);
        }
        if (CollectionUtils.isNotEmpty(updateTaskDefinitionLogs)) {
            List<Long> taskDefinitionCodes = updateTaskDefinitionLogs
                    .stream()
                    .map(TaskDefinition::getCode)
                    .distinct()
                    .collect(Collectors.toList());
            Map<Long, TaskDefinition> taskDefinitionMap = taskDefinitionMapper.queryByCodeList(taskDefinitionCodes)
                    .stream()
                    .collect(Collectors.toMap(TaskDefinition::getCode, Function.identity()));
            for (TaskDefinitionLog taskDefinitionToUpdate : updateTaskDefinitionLogs) {
                TaskDefinition task = taskDefinitionMap.get(taskDefinitionToUpdate.getCode());
                if (task == null) {
                    // Logged but missing from the live table: will be inserted rather than updated.
                    newTaskDefinitionLogs.add(taskDefinitionToUpdate);
                } else {
                    // Carry the live row's id so updateById targets it.
                    taskDefinitionToUpdate.setId(task.getId());
                }
            }
        }
        // for each taskDefinitionLog, we will insert a new version into db
        // and update the origin one if exist
        int updateResult = 0;
        int insertResult = 0;
        // only insert new task definitions if they not in updateTaskDefinitionLogs
        List<TaskDefinitionLog> newInsertTaskDefinitionLogs = newTaskDefinitionLogs.stream()
                .filter(taskDefinitionLog -> !updateTaskDefinitionLogs.contains(taskDefinitionLog))
                .collect(Collectors.toList());
        if (CollectionUtils.isNotEmpty(newInsertTaskDefinitionLogs)) {
            insertResult = taskDefinitionLogMapper.batchInsert(newInsertTaskDefinitionLogs);
        }
        if (CollectionUtils.isNotEmpty(updateTaskDefinitionLogs)) {
            insertResult += taskDefinitionLogMapper.batchInsert(updateTaskDefinitionLogs);
        }
        if (CollectionUtils.isNotEmpty(newTaskDefinitionLogs) && Boolean.TRUE.equals(syncDefine)) {
            updateResult += taskDefinitionMapper.batchInsert(newTaskDefinitionLogs);
        }
        if (CollectionUtils.isNotEmpty(updateTaskDefinitionLogs) && Boolean.TRUE.equals(syncDefine)) {
            for (TaskDefinitionLog taskDefinitionLog : updateTaskDefinitionLogs) {
                updateResult += taskDefinitionMapper.updateById(taskDefinitionLog);
            }
        }
        // NOTE(review): bitwise & of row counts — see javadoc; confirm && was not intended.
        return (insertResult & updateResult) > 0 ? 1 : Constants.EXIT_CODE_SUCCESS;
    }

    /**
     * save workflowDefinition (including create or update workflowDefinition)
     *
     * Writes a new workflow-definition log row at max-version+1 (or VERSION_FIRST) and,
     * when {@code syncDefine} is true, inserts or updates the live workflow definition.
     *
     * @param operator                 user performing the save
     * @param workflowDefinition       definition to save (its id is set when a new live row is inserted)
     * @param syncDefine               when TRUE, also sync the live workflow-definition table
     * @param isFromWorkflowDefinition when false the log is forced ONLINE; when true the log keeps
     *                                 ONLINE only if it already was ONLINE, otherwise OFFLINE
     * @return the inserted log version on success, 0 otherwise
     *         NOTE(review): {@code (insertLog & result) > 0} is bitwise AND of two row counts
     *         (e.g. 2 &amp; 1 == 0) — confirm {@code &&} of {@code > 0} checks was not intended.
     */
    @Override
    public int saveWorkflowDefine(User operator, WorkflowDefinition workflowDefinition, Boolean syncDefine,
                                  Boolean isFromWorkflowDefinition) {
        WorkflowDefinitionLog workflowDefinitionLog = new WorkflowDefinitionLog(workflowDefinition);
        Integer version = workflowDefinitionLogMapper.queryMaxVersionForDefinition(workflowDefinition.getCode());
        int insertVersion = version == null || version == 0 ? Constants.VERSION_FIRST : version + 1;
        workflowDefinitionLog.setVersion(insertVersion);
        workflowDefinitionLog
                .setReleaseState(
                        !isFromWorkflowDefinition || workflowDefinitionLog.getReleaseState() == ReleaseState.ONLINE
                                ? ReleaseState.ONLINE
                                : ReleaseState.OFFLINE);
        workflowDefinitionLog.setOperator(operator.getId());
        workflowDefinitionLog.setOperateTime(workflowDefinition.getUpdateTime());
        // Null id so the log mapper generates a fresh primary key.
        workflowDefinitionLog.setId(null);
        int insertLog = workflowDefinitionLogMapper.insert(workflowDefinitionLog);
        int result = 1;
        if (Boolean.TRUE.equals(syncDefine)) {
            if (workflowDefinition.getId() == null) {
                result = workflowDefinitionMapper.insert(workflowDefinitionLog);
                // Propagate the generated id back to the caller's object.
                workflowDefinition.setId(workflowDefinitionLog.getId());
            } else {
                workflowDefinitionLog.setId(workflowDefinition.getId());
                result = workflowDefinitionMapper.updateById(workflowDefinitionLog);
            }
        }
        return (insertLog & result) > 0 ? insertVersion : 0;
    }

    /**
     * save task relations
     *
     * Stamps each relation log with project/workflow identity, task versions (when the
     * definitions are supplied), timestamps and operator; when {@code syncDefine} is true
     * the live relation table is replaced unless it already equals the incoming set
     * (compared via hashCode sets); the relation log rows are always batch-inserted.
     *
     * @return EXIT_CODE_SUCCESS when nothing to do or both inserts report "positive" under
     *         bitwise AND, else EXIT_CODE_FAILURE
     *         NOTE(review): {@code (insert & resultLog) > 0} has the same bitwise-AND caveat
     *         as the other save methods — confirm intent.
     */
    @Override
    public int saveTaskRelation(User operator, long projectCode, long workflowDefinitionCode,
                                int workflowDefinitionVersion, List<WorkflowTaskRelationLog> taskRelationList,
                                List<TaskDefinitionLog> taskDefinitionLogs, Boolean syncDefine) {
        if (taskRelationList.isEmpty()) {
            return Constants.EXIT_CODE_SUCCESS;
        }
        Map<Long, TaskDefinitionLog> taskDefinitionLogMap = null;
        if (CollectionUtils.isNotEmpty(taskDefinitionLogs)) {
            taskDefinitionLogMap = taskDefinitionLogs
                    .stream()
                    .collect(Collectors.toMap(TaskDefinition::getCode, taskDefinitionLog -> taskDefinitionLog));
        }
        Date now = new Date();
        for (WorkflowTaskRelationLog workflowTaskRelationLog : taskRelationList) {
            workflowTaskRelationLog.setProjectCode(projectCode);
            workflowTaskRelationLog.setWorkflowDefinitionCode(workflowDefinitionCode);
            workflowTaskRelationLog.setWorkflowDefinitionVersion(workflowDefinitionVersion);
            if (taskDefinitionLogMap != null) {
                // Pin pre/post task versions to the supplied definition versions when available.
                TaskDefinitionLog preTaskDefinitionLog =
                        taskDefinitionLogMap.get(workflowTaskRelationLog.getPreTaskCode());
                if (preTaskDefinitionLog != null) {
                    workflowTaskRelationLog.setPreTaskVersion(preTaskDefinitionLog.getVersion());
                }
                TaskDefinitionLog postTaskDefinitionLog =
                        taskDefinitionLogMap.get(workflowTaskRelationLog.getPostTaskCode());
                if (postTaskDefinitionLog != null) {
                    workflowTaskRelationLog.setPostTaskVersion(postTaskDefinitionLog.getVersion());
                }
            }
            workflowTaskRelationLog.setCreateTime(now);
            workflowTaskRelationLog.setUpdateTime(now);
            workflowTaskRelationLog.setOperator(operator.getId());
            workflowTaskRelationLog.setOperateTime(now);
        }
        int insert = taskRelationList.size();
        if (Boolean.TRUE.equals(syncDefine)) {
            List<WorkflowTaskRelation> workflowTaskRelationList =
                    workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
            if (!workflowTaskRelationList.isEmpty()) {
                // Compare current vs incoming relations by hashCode sets; skip the rewrite if identical.
                Set<Integer> workflowTaskRelationSet =
                        workflowTaskRelationList.stream().map(WorkflowTaskRelation::hashCode).collect(toSet());
                Set<Integer> taskRelationSet =
                        taskRelationList.stream().map(WorkflowTaskRelationLog::hashCode).collect(toSet());
                boolean result = CollectionUtils.isEqualCollection(workflowTaskRelationSet, taskRelationSet);
                if (result) {
                    return Constants.EXIT_CODE_SUCCESS;
                }
                workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(projectCode, workflowDefinitionCode);
            }
            List<WorkflowTaskRelation> workflowTaskRelations =
                    taskRelationList.stream().map(WorkflowTaskRelation::new).collect(Collectors.toList());
            insert = workflowTaskRelationMapper.batchInsert(workflowTaskRelations);
        }
        int resultLog = workflowTaskRelationLogMapper.batchInsert(taskRelationList);
        return (insert & resultLog) > 0 ? Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE;
    }

    /**
     * Returns true when any workflow definition that references the given task code
     * is currently ONLINE.
     *
     * @param taskCode task definition code to check
     * @return true if some referencing workflow definition is ONLINE
     */
    @Override
    public boolean isTaskOnline(long taskCode) {
        List<WorkflowTaskRelation> workflowTaskRelationList = workflowTaskRelationMapper.queryByTaskCode(taskCode);
        if (!workflowTaskRelationList.isEmpty()) {
            Set<Long> processDefinitionCodes = workflowTaskRelationList
                    .stream()
                    .map(WorkflowTaskRelation::getWorkflowDefinitionCode)
                    .collect(toSet());
            List<WorkflowDefinition> workflowDefinitionList =
                    workflowDefinitionMapper.queryByCodes(processDefinitionCodes);
            // check process definition is already online
            for (WorkflowDefinition workflowDefinition : workflowDefinitionList) {
                if (workflowDefinition.getReleaseState() == ReleaseState.ONLINE) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Generate the DAG Graph based on the workflow definition id
     * Use temporarily before refactoring taskNode
     *
     * @param workflowDefinition workflow definition
     * @return dag graph
     */
    @Override
    public DAG<Long, TaskNode, TaskNodeRelation> genDagGraph(WorkflowDefinition workflowDefinition) {
        List<WorkflowTaskRelation> taskRelations =
                this.findRelationByCode(workflowDefinition.getCode(), workflowDefinition.getVersion());
        List<TaskNode> taskNodeList = transformTask(taskRelations, Lists.newArrayList());
        WorkflowDag workflowDag = DagHelper.getWorkflowDag(taskNodeList, new ArrayList<>(taskRelations));
        // Generate concrete Dag to be executed
        return DagHelper.buildDagGraph(workflowDag);
    }

    /**
     * generate DagData
     *
     * Bundles the definition with its relations and the task definitions
     * resolved from the task-definition log.
     */
    @Override
    public DagData genDagData(WorkflowDefinition workflowDefinition) {
        List<WorkflowTaskRelation> taskRelations =
                findRelationByCode(workflowDefinition.getCode(), workflowDefinition.getVersion());
        List<TaskDefinition> taskDefinitions = taskDefinitionLogDao.queryTaskDefineLogList(taskRelations)
                .stream()
                .map(t -> (TaskDefinition) t)
                .collect(Collectors.toList());
        return new DagData(workflowDefinition, taskRelations, taskDefinitions);
    }

    /**
     * find workflow task relation list by workflow
     *
     * Reads the logged relations for the given code+version and up-casts each
     * log row to WorkflowTaskRelation.
     */
    @Override
    public List<WorkflowTaskRelation> findRelationByCode(long workflowDefinitionCode, int workflowDefinitionVersion) {
        List<WorkflowTaskRelationLog> workflowTaskRelationLogList = workflowTaskRelationLogMapper
                .queryByWorkflowCodeAndVersion(workflowDefinitionCode, workflowDefinitionVersion);
        return workflowTaskRelationLogList.stream().map(r -> (WorkflowTaskRelation) r).collect(Collectors.toList());
    }

    /**
     * Use temporarily before refactoring taskNode
     *
     * Builds TaskNode view objects from relations: first collects, per post-task code,
     * the list of its non-zero pre-task codes, then materialises a TaskNode per known
     * task definition (relations whose definition is missing from the log are skipped).
     *
     * @param taskRelationList   relations defining the edges
     * @param taskDefinitionLogs definitions to use; when empty they are loaded from the log DAO
     * @return task nodes with predecessor lists serialised as JSON
     */
    @Override
    public List<TaskNode> transformTask(List<WorkflowTaskRelation> taskRelationList,
                                        List<TaskDefinitionLog> taskDefinitionLogs) {
        Map<Long, List<Long>> taskCodeMap = new HashMap<>();
        for (WorkflowTaskRelation workflowTaskRelation : taskRelationList) {
            taskCodeMap.compute(workflowTaskRelation.getPostTaskCode(), (k, v) -> {
                if (v == null) {
                    v = new ArrayList<>();
                }
                if (workflowTaskRelation.getPreTaskCode() != 0L) {
                    // 0 marks "no predecessor" (entry task) — not a real task code.
                    v.add(workflowTaskRelation.getPreTaskCode());
                }
                return v;
            });
        }
        if (CollectionUtils.isEmpty(taskDefinitionLogs)) {
            taskDefinitionLogs = taskDefinitionLogDao.queryTaskDefineLogList(taskRelationList);
        }
        Map<Long, TaskDefinitionLog> taskDefinitionLogMap = taskDefinitionLogs.stream()
                .collect(Collectors.toMap(TaskDefinitionLog::getCode, taskDefinitionLog -> taskDefinitionLog));
        List<TaskNode> taskNodeList = new ArrayList<>();
        for (Entry<Long, List<Long>> code : taskCodeMap.entrySet()) {
            TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMap.get(code.getKey());
            if (taskDefinitionLog != null) {
                TaskNode taskNode = new TaskNode();
                taskNode.setCode(taskDefinitionLog.getCode());
                taskNode.setVersion(taskDefinitionLog.getVersion());
                taskNode.setName(taskDefinitionLog.getName());
                taskNode.setDesc(taskDefinitionLog.getDescription());
                taskNode.setType(taskDefinitionLog.getTaskType().toUpperCase());
                taskNode.setRunFlag(taskDefinitionLog.getFlag() == Flag.YES ? Constants.FLOWNODE_RUN_FLAG_NORMAL
                        : Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
                taskNode.setMaxRetryTimes(taskDefinitionLog.getFailRetryTimes());
                taskNode.setRetryInterval(taskDefinitionLog.getFailRetryInterval());
                taskNode.setParams(taskDefinitionLog.getTaskParams());
                taskNode.setTaskInstancePriority(taskDefinitionLog.getTaskPriority());
                taskNode.setWorkerGroup(taskDefinitionLog.getWorkerGroup());
                taskNode.setEnvironmentCode(taskDefinitionLog.getEnvironmentCode());
                // Timeout settings are serialised to JSON on the node.
                taskNode.setTimeout(JSONUtils
                        .toJsonString(new TaskTimeoutParameter(taskDefinitionLog.getTimeoutFlag() == TimeoutFlag.OPEN,
                                taskDefinitionLog.getTimeoutNotifyStrategy(), taskDefinitionLog.getTimeout())));
                taskNode.setDelayTime(taskDefinitionLog.getDelayTime());
                taskNode.setPreTasks(JSONUtils.toJsonString(code.getValue().stream().map(taskDefinitionLogMap::get)
                        .map(TaskDefinition::getCode).collect(Collectors.toList())));
                taskNode.setTaskGroupId(taskDefinitionLog.getTaskGroupId());
                taskNode.setTaskGroupPriority(taskDefinitionLog.getTaskGroupPriority());
                taskNode.setCpuQuota(taskDefinitionLog.getCpuQuota());
                taskNode.setMemoryMax(taskDefinitionLog.getMemoryMax());
                taskNode.setTaskExecuteType(taskDefinitionLog.getTaskExecuteType());
                taskNodeList.add(taskNode);
            }
        }
        return taskNodeList;
    }

    /**
     * find k8s config yaml by clusterName
     *
     * @param clusterName clusterName
     * @return the cluster's k8s config yaml, or null when the name is blank or unknown
     */
    @Override
    public String findConfigYamlByName(String clusterName) {
        if (Strings.isNullOrEmpty(clusterName)) {
            return null;
        }
        QueryWrapper<Cluster> nodeWrapper = new QueryWrapper<>();
        nodeWrapper.eq("name", clusterName);
        Cluster cluster = clusterMapper.selectOne(nodeWrapper);
        return cluster == null ? null : ClusterConfUtils.getK8sConfig(cluster.getConfig());
    }

    /**
     * Forces the workflow instance owning the given task instance to SUCCESS, but only when:
     * the instance is in a failed/stopped state, every enabled (Flag.YES) task definition of
     * the instance's DAG has a task instance, and the given task is the single failed/killed one.
     */
    @Override
    public void forceWorkflowInstanceSuccessByTaskInstanceId(TaskInstance task) {
        WorkflowInstance workflowInstance =
                findWorkflowInstanceDetailById(task.getWorkflowInstanceId()).orElse(null);
        if (workflowInstance != null
                && (workflowInstance.getState().isFailure() || workflowInstance.getState().isStopped())) {
            List<TaskInstance> validTaskList =
                    taskInstanceDao.queryValidTaskListByWorkflowInstanceId(workflowInstance.getId());
            List<Long> instanceTaskCodeList =
                    validTaskList.stream().map(TaskInstance::getTaskCode).collect(Collectors.toList());
            List<WorkflowTaskRelation> taskRelations =
                    findRelationByCode(workflowInstance.getWorkflowDefinitionCode(),
                            workflowInstance.getWorkflowDefinitionVersion());
            List<TaskDefinitionLog> taskDefinitionLogs = taskDefinitionLogDao.queryTaskDefineLogList(taskRelations);
            List<Long> definiteTaskCodeList =
                    taskDefinitionLogs.stream().filter(definitionLog -> definitionLog.getFlag() == Flag.YES)
                            .map(TaskDefinitionLog::getCode).collect(Collectors.toList());
            // only all tasks have instances
            if (CollectionUtils.isEqualCollection(instanceTaskCodeList, definiteTaskCodeList)) {
                List<Integer> failTaskList = validTaskList.stream()
                        .filter(instance -> instance.getState().isFailure() || instance.getState().isKill())
                        .map(TaskInstance::getId).collect(Collectors.toList());
                if (failTaskList.size() == 1 && failTaskList.contains(task.getId())) {
                    workflowInstance.setStateWithDesc(WorkflowExecutionStatus.SUCCESS,
                            "success by task force success");
                    workflowInstanceDao.updateById(workflowInstance);
                }
            }
        }
    }
}
apache/cxf
35,291
rt/rs/description-openapi-v3/src/main/java/org/apache/cxf/jaxrs/openapi/OpenApiFeature.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.jaxrs.openapi; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.container.ContainerRequestFilter; import jakarta.ws.rs.container.PreMatching; import jakarta.ws.rs.core.Application; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriInfo; import org.apache.cxf.Bus; import org.apache.cxf.annotations.Provider; import org.apache.cxf.annotations.Provider.Scope; import org.apache.cxf.annotations.Provider.Type; import org.apache.cxf.common.util.PropertyUtils; import org.apache.cxf.common.util.StringUtils; import org.apache.cxf.endpoint.Server; import org.apache.cxf.feature.AbstractPortableFeature; import org.apache.cxf.feature.DelegatingFeature; import org.apache.cxf.jaxrs.JAXRSServiceFactoryBean; import 
org.apache.cxf.jaxrs.common.openapi.DefaultApplicationFactory; import org.apache.cxf.jaxrs.common.openapi.SwaggerProperties; import org.apache.cxf.jaxrs.ext.MessageContext; import org.apache.cxf.jaxrs.provider.ServerProviderFactory; import org.apache.cxf.jaxrs.swagger.ui.SwaggerUiConfig; import org.apache.cxf.jaxrs.swagger.ui.SwaggerUiSupport; import io.swagger.v3.jaxrs2.integration.JaxrsOpenApiContextBuilder; import io.swagger.v3.jaxrs2.integration.resources.BaseOpenApiResource; import io.swagger.v3.jaxrs2.integration.resources.OpenApiResource; import io.swagger.v3.oas.integration.GenericOpenApiContextBuilder; import io.swagger.v3.oas.integration.OpenApiConfigurationException; import io.swagger.v3.oas.integration.SwaggerConfiguration; import io.swagger.v3.oas.integration.api.OpenAPIConfiguration; import io.swagger.v3.oas.integration.api.OpenApiContext; import io.swagger.v3.oas.models.Components; import io.swagger.v3.oas.models.OpenAPI; import io.swagger.v3.oas.models.info.Contact; import io.swagger.v3.oas.models.info.Info; import io.swagger.v3.oas.models.info.License; import io.swagger.v3.oas.models.security.SecurityScheme; @Provider(value = Type.Feature, scope = Scope.Server) public class OpenApiFeature extends DelegatingFeature<OpenApiFeature.Portable> implements SwaggerUiSupport, SwaggerProperties { public OpenApiFeature() { super(new Portable()); } public boolean isScan() { return delegate.isScan(); } public void setScan(boolean scan) { delegate.setScan(scan); } public String getFilterClass() { return delegate.getFilterClass(); } public void setFilterClass(String filterClass) { delegate.setFilterClass(filterClass); } public Set<String> getResourcePackages() { return delegate.getResourcePackages(); } public void setResourcePackages(Set<String> resourcePackages) { delegate.setResourcePackages(resourcePackages); } public String getVersion() { return delegate.getVersion(); } public void setVersion(String version) { delegate.setVersion(version); } public String 
getTitle() { return delegate.getTitle(); } public void setTitle(String title) { delegate.setTitle(title); } public String getDescription() { return delegate.getDescription(); } public void setDescription(String description) { delegate.setDescription(description); } public String getContactName() { return delegate.getContactName(); } public void setContactName(String contactName) { delegate.setContactName(contactName); } public String getContactEmail() { return delegate.getContactEmail(); } public void setContactEmail(String contactEmail) { delegate.setContactEmail(contactEmail); } public String getContactUrl() { return delegate.getContactUrl(); } public void setContactUrl(String contactUrl) { delegate.setContactUrl(contactUrl); } public String getLicense() { return delegate.getLicense(); } public void setLicense(String license) { delegate.setLicense(license); } public String getLicenseUrl() { return delegate.getLicenseUrl(); } public void setLicenseUrl(String licenseUrl) { delegate.setLicenseUrl(licenseUrl); } public String getTermsOfServiceUrl() { return delegate.getTermsOfServiceUrl(); } public void setTermsOfServiceUrl(String termsOfServiceUrl) { delegate.setTermsOfServiceUrl(termsOfServiceUrl); } public boolean isReadAllResources() { return delegate.isReadAllResources(); } public void setReadAllResources(boolean readAllResources) { delegate.setReadAllResources(readAllResources); } public Set<String> getResourceClasses() { return delegate.getResourceClasses(); } public void setResourceClasses(Set<String> resourceClasses) { delegate.setResourceClasses(resourceClasses); } public Collection<String> getIgnoredRoutes() { return delegate.getIgnoredRoutes(); } public void setIgnoredRoutes(Collection<String> ignoredRoutes) { delegate.setIgnoredRoutes(ignoredRoutes); } public boolean isPrettyPrint() { return delegate.isPrettyPrint(); } public void setPrettyPrint(boolean prettyPrint) { delegate.setPrettyPrint(prettyPrint); } public boolean isRunAsFilter() { return 
delegate.isRunAsFilter(); } @Override public Boolean isSupportSwaggerUi() { return delegate.isSupportSwaggerUi(); } public void setSupportSwaggerUi(Boolean supportSwaggerUi) { delegate.setSupportSwaggerUi(supportSwaggerUi); } public String getSwaggerUiVersion() { return delegate.getSwaggerUiVersion(); } public void setSwaggerUiVersion(String swaggerUiVersion) { delegate.setSwaggerUiVersion(swaggerUiVersion); } public String getSwaggerUiMavenGroupAndArtifact() { return delegate.getSwaggerUiMavenGroupAndArtifact(); } public void setSwaggerUiMavenGroupAndArtifact(String swaggerUiMavenGroupAndArtifact) { delegate.setSwaggerUiMavenGroupAndArtifact(swaggerUiMavenGroupAndArtifact); } @Override public Map<String, String> getSwaggerUiMediaTypes() { return delegate.getSwaggerUiMediaTypes(); } public void setSwaggerUiMediaTypes(Map<String, String> swaggerUiMediaTypes) { delegate.setSwaggerUiMediaTypes(swaggerUiMediaTypes); } public String getConfigLocation() { return delegate.getConfigLocation(); } public void setConfigLocation(String configLocation) { delegate.setConfigLocation(configLocation); } public String getPropertiesLocation() { return delegate.getPropertiesLocation(); } public void setPropertiesLocation(String propertiesLocation) { delegate.setPropertiesLocation(propertiesLocation); } public void setRunAsFilter(boolean runAsFilter) { delegate.setRunAsFilter(runAsFilter); } public Map<String, SecurityScheme> getSecurityDefinitions() { return delegate.getSecurityDefinitions(); } public void setSecurityDefinitions(Map<String, SecurityScheme> securityDefinitions) { delegate.setSecurityDefinitions(securityDefinitions); } public OpenApiCustomizer getCustomizer() { return delegate.getCustomizer(); } public void setCustomizer(OpenApiCustomizer customizer) { delegate.setCustomizer(customizer); } public void setScanKnownConfigLocations(boolean scanKnownConfigLocations) { delegate.setScanKnownConfigLocations(scanKnownConfigLocations); } public boolean 
isScanKnownConfigLocations() { return delegate.isScanKnownConfigLocations(); } public void setSwaggerUiConfig(SwaggerUiConfig swaggerUiConfig) { delegate.setSwaggerUiConfig(swaggerUiConfig); } public void setUseContextBasedConfig(boolean useContextBasedConfig) { delegate.setUseContextBasedConfig(useContextBasedConfig); } public boolean isUseContextBasedConfig() { return delegate.isUseContextBasedConfig(); } public String getScannerClass() { return delegate.getScannerClass(); } public void setScannerClass(String scannerClass) { delegate.setScannerClass(scannerClass); } @Override public SwaggerUiConfig getSwaggerUiConfig() { return delegate.getSwaggerUiConfig(); } @Override public String findSwaggerUiRoot() { return delegate.findSwaggerUiRoot(); } public Properties getUserProperties(Map<String, Object> userDefinedOptions) { return delegate.getUserProperties(userDefinedOptions); } public void registerOpenApiResources(JAXRSServiceFactoryBean sfb, Set<String> packages, OpenAPIConfiguration config) { delegate.registerOpenApiResources(sfb, packages, config); } public void registerServletConfigProvider(ServerProviderFactory factory) { delegate.registerServletConfigProvider(factory); } public void registerSwaggerUiResources(JAXRSServiceFactoryBean sfb, Properties properties, ServerProviderFactory factory, Bus bus) { delegate.registerSwaggerUiResources(sfb, properties, factory, bus); } public Info getInfo(Properties properties) { return delegate.getInfo(properties); } public String getOrFallback(String value, Properties properties, String property) { return delegate.getOrFallback(value, properties, property); } public Boolean getOrFallback(Boolean value, Properties properties, String property) { return delegate.getOrFallback(value, properties, property); } public Set<String> getOrFallback(Set<String> collection, Properties properties, String property) { return delegate.getOrFallback(collection, properties, property); } public Collection<String> 
scanResourcePackages(JAXRSServiceFactoryBean sfb) { return delegate.scanResourcePackages(sfb); } public static Properties combine(Properties primary, Properties secondary) { return Portable.combine(primary, secondary); } public static void setOrReplace(Properties source, Properties destination) { Portable.setOrReplace(source, destination); } public static Optional<Components> registerComponents(Map<String, SecurityScheme> securityDefinitions) { return Portable.registerComponents(securityDefinitions); } public BaseOpenApiResource createOpenApiResource() { return delegate.createOpenApiResource(); } public static class Portable implements AbstractPortableFeature, SwaggerUiSupport, SwaggerProperties { private String version; private String title; private String description; private String contactName; private String contactEmail; private String contactUrl; private String license; private String licenseUrl; private String termsOfServiceUrl; // Read all operations also with no @Operation private boolean readAllResources = true; // Scan all JAX-RS resources automatically private boolean scan = true; private boolean prettyPrint = true; private boolean runAsFilter; private Collection<String> ignoredRoutes; private Set<String> resourcePackages; private Set<String> resourceClasses; private String filterClass; private Boolean supportSwaggerUi; private String swaggerUiVersion; private String swaggerUiMavenGroupAndArtifact; private Map<String, String> swaggerUiMediaTypes; // Additional components private Map<String, SecurityScheme> securityDefinitions; private OpenApiCustomizer customizer; // Allows to pass the configuration location, usually openapi-configuration.json // or openapi-configuration.yml file. 
    private String configLocation;
    // Allows to pass the properties location, by default swagger.properties
    private String propertiesLocation = DEFAULT_PROPS_LOCATION;
    // Allows to disable automatic scan of known configuration locations (enabled by default)
    private boolean scanKnownConfigLocations = true;
    // Swagger UI configuration parameters (to be passed as query string).
    private SwaggerUiConfig swaggerUiConfig;
    // Generates the Swagger Context ID (instead of using the default one). It is
    // necessary when more than one JAXRS Server Factory Bean or OpenApiFeature instance
    // are co-located in the same application.
    private boolean useContextBasedConfig;
    private String ctxId;
    // The API Scanner class to use
    private String scannerClass;

    // Feature bootstrap: resolves the configuration source (explicit location,
    // auto-detected location, or swagger.properties plus programmatic settings),
    // builds the OpenAPI context, then registers resources, UI and filters.
    @Override
    public void initialize(Server server, Bus bus) {
        final JAXRSServiceFactoryBean sfb = (JAXRSServiceFactoryBean)server
            .getEndpoint()
            .get(JAXRSServiceFactoryBean.class.getName());
        final ServerProviderFactory factory = (ServerProviderFactory)server
            .getEndpoint()
            .get(ServerProviderFactory.class.getName());
        final Set<String> packages = new HashSet<>();
        if (resourcePackages != null) {
            packages.addAll(resourcePackages);
        }
        // Generate random Context ID for Swagger
        if (useContextBasedConfig) {
            ctxId = UUID.randomUUID().toString();
        }
        Properties swaggerProps = null;
        GenericOpenApiContextBuilder<?> openApiConfiguration;
        final Application application = DefaultApplicationFactory.createApplicationOrDefault(server, factory,
            sfb, bus, resourcePackages, isScan());
        String defaultConfigLocation = getConfigLocation();
        if (scanKnownConfigLocations && StringUtils.isEmpty(defaultConfigLocation)) {
            defaultConfigLocation = OpenApiDefaultConfigurationScanner.locateDefaultConfiguration().orElse(null);
        }
        if (StringUtils.isEmpty(defaultConfigLocation)) {
            // No config file: assemble a SwaggerConfiguration programmatically,
            // falling back to swagger.properties for individual values.
            swaggerProps = getSwaggerProperties(propertiesLocation, bus);
            if (isScan()) {
                packages.addAll(scanResourcePackages(sfb));
            }
            final OpenAPI oas = new OpenAPI().info(getInfo(swaggerProps));
            registerComponents(securityDefinitions).ifPresent(oas::setComponents);
            final SwaggerConfiguration config = new SwaggerConfiguration()
                .openAPI(oas)
                .prettyPrint(getOrFallback(isPrettyPrint(), swaggerProps, PRETTY_PRINT_PROPERTY))
                .readAllResources(isReadAllResources())
                .ignoredRoutes(getIgnoredRoutes())
                .filterClass(getOrFallback(getFilterClass(), swaggerProps, FILTER_CLASS_PROPERTY))
                .resourceClasses(getResourceClasses())
                .resourcePackages(getOrFallback(packages, swaggerProps, RESOURCE_PACKAGE_PROPERTY));
            if (!StringUtils.isEmpty(getScannerClass())) {
                config.setScannerClass(getScannerClass());
            }
            openApiConfiguration = new JaxrsOpenApiContextBuilder<>()
                .application(application)
                .openApiConfiguration(config)
                .ctxId(ctxId); /* will be null if not used */
        } else {
            openApiConfiguration = new JaxrsOpenApiContextBuilder<>()
                .application(application)
                .configLocation(defaultConfigLocation)
                .ctxId(ctxId); /* will be null if not used */
        }
        try {
            final OpenApiContext context = openApiConfiguration.buildContext(true);
            final Properties userProperties = getUserProperties(
                context
                    .getOpenApiConfiguration()
                    .getUserDefinedOptions());
            registerOpenApiResources(sfb, packages, context.getOpenApiConfiguration());
            registerSwaggerUiResources(sfb, combine(swaggerProps, userProperties), factory, bus);
            registerSwaggerContainerRequestFilter(factory, application, context.getOpenApiConfiguration());
            if (useContextBasedConfig) {
                registerServletConfigProvider(factory);
            }
            if (customizer != null) {
                customizer.setApplicationInfo(factory.getApplicationProvider());
            }
            bus.setProperty("openapi.service.description.available", "true");
        } catch (OpenApiConfigurationException ex) {
            throw new RuntimeException("Unable to initialize OpenAPI context", ex);
        }
    }

    // When runAsFilter is set, serve the OpenAPI document through a
    // ContainerRequestFilter instead of a dedicated resource.
    private void registerSwaggerContainerRequestFilter(ServerProviderFactory factory, Application application,
            OpenAPIConfiguration config) {
        if (isRunAsFilter()) {
            List<Object> providers = new ArrayList<>();
            BaseOpenApiResource filter =
                createOpenApiRequestFilter(application).openApiConfiguration(config)
                    .configLocation(configLocation);
            providers.add(filter);
            factory.setUserProviders(providers);
        }
    }

    // Plain JavaBean accessors for the Portable configuration state. The
    // collection setters (resource packages/classes, ignored routes) take
    // defensive copies of the supplied collections.
    public boolean isScan() { return scan; }
    public void setScan(boolean scan) { this.scan = scan; }
    public String getFilterClass() { return filterClass; }
    public void setFilterClass(String filterClass) { this.filterClass = filterClass; }
    public Set<String> getResourcePackages() { return resourcePackages; }
    public void setResourcePackages(Set<String> resourcePackages) { this.resourcePackages = (resourcePackages == null) ? null : new HashSet<>(resourcePackages); }
    public String getVersion() { return version; }
    public void setVersion(String version) { this.version = version; }
    public String getTitle() { return title; }
    public void setTitle(String title) { this.title = title; }
    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }
    public String getContactName() { return contactName; }
    public void setContactName(String contactName) { this.contactName = contactName; }
    public String getContactEmail() { return contactEmail; }
    public void setContactEmail(String contactEmail) { this.contactEmail = contactEmail; }
    public String getContactUrl() { return contactUrl; }
    public void setContactUrl(String contactUrl) { this.contactUrl = contactUrl; }
    public String getLicense() { return license; }
    public void setLicense(String license) { this.license = license; }
    public String getLicenseUrl() { return licenseUrl; }
    public void setLicenseUrl(String licenseUrl) { this.licenseUrl = licenseUrl; }
    public String getTermsOfServiceUrl() { return termsOfServiceUrl; }
    public void setTermsOfServiceUrl(String termsOfServiceUrl) { this.termsOfServiceUrl = termsOfServiceUrl; }
    public boolean isReadAllResources() { return readAllResources; }
    public void setReadAllResources(boolean readAllResources) { this.readAllResources = readAllResources; }
    public Set<String> getResourceClasses() { return resourceClasses; }
    public void setResourceClasses(Set<String> resourceClasses) { this.resourceClasses = (resourceClasses == null) ? null : new HashSet<>(resourceClasses); }
    public Collection<String> getIgnoredRoutes() { return ignoredRoutes; }
    public void setIgnoredRoutes(Collection<String> ignoredRoutes) { this.ignoredRoutes = (ignoredRoutes == null) ? null : new HashSet<>(ignoredRoutes); }
    public boolean isPrettyPrint() { return prettyPrint; }
    public void setPrettyPrint(boolean prettyPrint) { this.prettyPrint = prettyPrint; }
    public boolean isRunAsFilter() { return runAsFilter; }
    @Override public Boolean isSupportSwaggerUi() { return supportSwaggerUi; }
    public void setSupportSwaggerUi(Boolean supportSwaggerUi) { this.supportSwaggerUi = supportSwaggerUi; }
    public String getSwaggerUiVersion() { return swaggerUiVersion; }
    public void setSwaggerUiVersion(String swaggerUiVersion) { this.swaggerUiVersion = swaggerUiVersion; }
    public String getSwaggerUiMavenGroupAndArtifact() { return swaggerUiMavenGroupAndArtifact; }
    public void setSwaggerUiMavenGroupAndArtifact(
            String swaggerUiMavenGroupAndArtifact) { this.swaggerUiMavenGroupAndArtifact = swaggerUiMavenGroupAndArtifact; }
    @Override public Map<String, String> getSwaggerUiMediaTypes() { return swaggerUiMediaTypes; }
    public void setSwaggerUiMediaTypes(Map<String, String> swaggerUiMediaTypes) { this.swaggerUiMediaTypes = swaggerUiMediaTypes; }
    public String getConfigLocation() { return configLocation; }
    public void setConfigLocation(String configLocation) { this.configLocation = configLocation; }
    public String getPropertiesLocation() { return propertiesLocation; }
    public void setPropertiesLocation(String propertiesLocation) { this.propertiesLocation = propertiesLocation; }
    public void setRunAsFilter(boolean runAsFilter) { this.runAsFilter = runAsFilter; }
    public Map<String, SecurityScheme> getSecurityDefinitions() { return securityDefinitions; }
    public void setSecurityDefinitions(Map<String, SecurityScheme> securityDefinitions) { this.securityDefinitions = securityDefinitions; }
    public OpenApiCustomizer getCustomizer() { return customizer; }
    public void setCustomizer(OpenApiCustomizer customizer) { this.customizer = customizer; }
    public void setScanKnownConfigLocations(boolean scanKnownConfigLocations) { this.scanKnownConfigLocations = scanKnownConfigLocations; }
    public boolean isScanKnownConfigLocations() { return scanKnownConfigLocations; }
    public void setSwaggerUiConfig(final SwaggerUiConfig swaggerUiConfig) { this.swaggerUiConfig = swaggerUiConfig; }
    public void setUseContextBasedConfig(final boolean useContextBasedConfig) { this.useContextBasedConfig = useContextBasedConfig; }
    public boolean isUseContextBasedConfig() { return useContextBasedConfig; }
    public String getScannerClass() { return scannerClass; }
    public void setScannerClass(String scannerClass) { this.scannerClass = scannerClass; }
    @Override public SwaggerUiConfig getSwaggerUiConfig() { return swaggerUiConfig; }
    @Override public String findSwaggerUiRoot() { return SwaggerUi.findSwaggerUiRoot(swaggerUiMavenGroupAndArtifact, swaggerUiVersion); }

    // Converts the context's user-defined options into string Properties,
    // skipping null values.
    protected Properties getUserProperties(final Map<String, Object> userDefinedOptions) {
        final Properties properties = new Properties();
        if (userDefinedOptions != null) {
            userDefinedOptions
                .entrySet()
                .stream()
                .filter(entry -> entry.getValue() != null)
                .forEach(entry -> properties.setProperty(entry.getKey(), entry.getValue().toString()));
        }
        return properties;
    }

    // Registers the OpenAPI document resource (customized variant when a
    // customizer is configured) with the service factory bean.
    protected void registerOpenApiResources(
            final JAXRSServiceFactoryBean sfb,
            final Set<String> packages,
            final OpenAPIConfiguration config) {
        if (customizer != null) {
            customizer.setClassResourceInfos(sfb.getClassResourceInfo());
        }
        sfb.setResourceClassesFromBeans(Arrays.asList(
            createOpenApiResource()
                .openApiConfiguration(config)
                .configLocation(configLocation)
                .resourcePackages(packages)));
    }
    protected void
registerServletConfigProvider(ServerProviderFactory factory) { factory.setUserProviders(Arrays.asList(new ServletConfigProvider(ctxId))); } protected void registerSwaggerUiResources(JAXRSServiceFactoryBean sfb, Properties properties, ServerProviderFactory factory, Bus bus) { final Registration swaggerUiRegistration = getSwaggerUi(bus, properties, isRunAsFilter()); if (!isRunAsFilter()) { sfb.setResourceClassesFromBeans(swaggerUiRegistration.getResources()); } factory.setUserProviders(swaggerUiRegistration.getProviders()); } /** * The info will be used only if there is no @OpenAPIDefinition annotation is present. */ private Info getInfo(final Properties properties) { final Info info = new Info() .title(getOrFallback(getTitle(), properties, TITLE_PROPERTY)) .version(getOrFallback(getVersion(), properties, VERSION_PROPERTY)) .description(getOrFallback(getDescription(), properties, DESCRIPTION_PROPERTY)) .termsOfService(getOrFallback(getTermsOfServiceUrl(), properties, TERMS_URL_PROPERTY)) .contact(new Contact() .name(getOrFallback(getContactName(), properties, CONTACT_PROPERTY)) .email(getContactEmail()) .url(getContactUrl())) .license(new License() .name(getOrFallback(getLicense(), properties, LICENSE_PROPERTY)) .url(getOrFallback(getLicenseUrl(), properties, LICENSE_URL_PROPERTY))); if (info.getLicense().getName() == null) { info.getLicense().setName(DEFAULT_LICENSE_VALUE); } if (info.getLicense().getUrl() == null && DEFAULT_LICENSE_VALUE.equals(info.getLicense().getName())) { info.getLicense().setUrl(DEFAULT_LICENSE_URL); } return info; } private String getOrFallback(String value, Properties properties, String property) { if (value == null && properties != null) { return properties.getProperty(property); } else { return value; } } private Boolean getOrFallback(Boolean value, Properties properties, String property) { Boolean fallback = value; if (value == null && properties != null) { fallback = PropertyUtils.isTrue(properties.get(PRETTY_PRINT_PROPERTY)); } if 
(fallback == null) { return false; } return fallback; } private Set<String> getOrFallback(Set<String> collection, Properties properties, String property) { if (collection.isEmpty() && properties != null) { final String value = properties.getProperty(property); if (!StringUtils.isEmpty(value)) { collection.add(value); } } return collection; } private Collection<String> scanResourcePackages(JAXRSServiceFactoryBean sfb) { return sfb .getClassResourceInfo() .stream() .map(cri -> cri.getServiceClass().getPackage().getName()) .collect(Collectors.toSet()); } private static Properties combine(final Properties primary, final Properties secondary) { if (primary == null) { return secondary; } else if (secondary == null) { return primary; } else { final Properties combined = new Properties(); setOrReplace(secondary, combined); setOrReplace(primary, combined); return combined; } } private static void setOrReplace(final Properties source, final Properties destination) { final Enumeration<?> enumeration = source.propertyNames(); while (enumeration.hasMoreElements()) { final String name = (String)enumeration.nextElement(); destination.setProperty(name, source.getProperty(name)); } } private static Optional<Components> registerComponents(Map<String, SecurityScheme> securityDefinitions) { final Components components = new Components(); boolean hasComponents = false; if (securityDefinitions != null && !securityDefinitions.isEmpty()) { securityDefinitions.forEach(components::addSecuritySchemes); hasComponents |= true; } return hasComponents ? Optional.of(components) : Optional.empty(); } private BaseOpenApiResource createOpenApiResource() { return (customizer == null) ? new OpenApiResource() : new OpenApiCustomizedResource(customizer); } private BaseOpenApiResource createOpenApiRequestFilter(Application application) { return (customizer == null) ? 
            // (completes the ternary begun on the previous line)
            new SwaggerContainerRequestFilter(application)
            : new CustomizedSwaggerContainerRequestFilter(application, customizer);
    }
}

// Serves the OpenAPI document when the feature runs in filter mode: any request
// whose path ends with openapi.json / openapi.yaml is answered directly and aborted.
@PreMatching
protected static class SwaggerContainerRequestFilter extends BaseOpenApiResource implements ContainerRequestFilter {
    protected static final String APIDOCS_LISTING_PATH_JSON = "openapi.json";
    protected static final String APIDOCS_LISTING_PATH_YAML = "openapi.yaml";
    @Context
    protected MessageContext mc;
    private Application app;

    public SwaggerContainerRequestFilter(Application app) {
        this.app = app;
    }

    @Override
    public void filter(ContainerRequestContext requestContext) throws IOException {
        UriInfo ui = mc.getUriInfo();
        Response response = null;
        if (ui.getPath().endsWith(APIDOCS_LISTING_PATH_JSON)) {
            try {
                response = super.getOpenApi(mc.getHttpHeaders(), mc.getServletConfig(), app, ui, "json");
            } catch (Exception ex) {
                throw new IOException(ex);
            }
        } else if (ui.getPath().endsWith(APIDOCS_LISTING_PATH_YAML)) {
            try {
                response = super.getOpenApi(mc.getHttpHeaders(), mc.getServletConfig(), app, ui, "yaml");
            } catch (Exception ex) {
                throw new IOException(ex);
            }
        }
        if (response != null) {
            // Short-circuit the request with the generated document.
            requestContext.abortWith(response);
        }
    }
}

// Same contract as SwaggerContainerRequestFilter, but routed through the
// customizer-aware resource (note the different getOpenApi() argument order).
@PreMatching
protected static class CustomizedSwaggerContainerRequestFilter extends OpenApiCustomizedResource implements ContainerRequestFilter {
    protected static final String APIDOCS_LISTING_PATH_JSON = "openapi.json";
    protected static final String APIDOCS_LISTING_PATH_YAML = "openapi.yaml";
    @Context
    protected MessageContext mc;
    private Application app;

    public CustomizedSwaggerContainerRequestFilter(Application app, OpenApiCustomizer customizer) {
        super(customizer);
        this.app = app;
    }

    @Override
    public void filter(ContainerRequestContext requestContext) throws IOException {
        UriInfo ui = mc.getUriInfo();
        Response response = null;
        if (ui.getPath().endsWith(APIDOCS_LISTING_PATH_JSON)) {
            try {
                response = super.getOpenApi(app, mc.getServletConfig(), mc.getHttpHeaders(), ui, "json");
            } catch (Exception ex) {
                throw new IOException(ex);
            }
        } else if (ui.getPath().endsWith(APIDOCS_LISTING_PATH_YAML)) {
            try {
                response = super.getOpenApi(app, mc.getServletConfig(), mc.getHttpHeaders(), ui, "yaml");
            } catch (Exception ex) {
                throw new IOException(ex);
            }
        }
        if (response != null) {
            requestContext.abortWith(response);
        }
    }
} }
apache/ignite
35,194
modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal; import java.io.Externalizable; import java.io.IOException; import java.io.InvalidObjectException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.ObjectStreamException; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Executor; import java.util.concurrent.ForkJoinPool; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.failure.FailureType; import org.apache.ignite.internal.binary.BinaryMarshaller; import org.apache.ignite.internal.binary.GridBinaryMarshaller; import org.apache.ignite.internal.cache.query.index.IndexProcessor; import org.apache.ignite.internal.cache.transform.CacheObjectTransformerProcessor; import org.apache.ignite.internal.maintenance.MaintenanceProcessor; import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManager; import 
org.apache.ignite.internal.managers.collision.GridCollisionManager; import org.apache.ignite.internal.managers.communication.GridIoManager; import org.apache.ignite.internal.managers.deployment.GridDeploymentManager; import org.apache.ignite.internal.managers.discovery.GridDiscoveryManager; import org.apache.ignite.internal.managers.encryption.GridEncryptionManager; import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager; import org.apache.ignite.internal.managers.failover.GridFailoverManager; import org.apache.ignite.internal.managers.indexing.GridIndexingManager; import org.apache.ignite.internal.managers.loadbalancer.GridLoadBalancerManager; import org.apache.ignite.internal.managers.systemview.GridSystemViewManager; import org.apache.ignite.internal.managers.tracing.GridTracingManager; import org.apache.ignite.internal.processors.affinity.GridAffinityProcessor; import org.apache.ignite.internal.processors.cache.CacheConflictResolutionManager; import org.apache.ignite.internal.processors.cache.GridCacheProcessor; import org.apache.ignite.internal.processors.cache.binary.CacheObjectBinaryProcessorImpl; import org.apache.ignite.internal.processors.cache.persistence.defragmentation.IgniteDefragmentation; import org.apache.ignite.internal.processors.cache.persistence.defragmentation.IgniteDefragmentationImpl; import org.apache.ignite.internal.processors.cache.persistence.filename.PdsFoldersResolver; import org.apache.ignite.internal.processors.cacheobject.IgniteCacheObjectProcessor; import org.apache.ignite.internal.processors.closure.GridClosureProcessor; import org.apache.ignite.internal.processors.cluster.ClusterProcessor; import org.apache.ignite.internal.processors.cluster.GridClusterStateProcessor; import org.apache.ignite.internal.processors.compress.CompressionProcessor; import org.apache.ignite.internal.processors.configuration.distributed.DistributedConfigurationProcessor; import 
org.apache.ignite.internal.processors.continuous.GridContinuousProcessor; import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor; import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor; import org.apache.ignite.internal.processors.diagnostic.DiagnosticProcessor; import org.apache.ignite.internal.processors.failure.FailureProcessor; import org.apache.ignite.internal.processors.job.GridJobProcessor; import org.apache.ignite.internal.processors.jobmetrics.GridJobMetricsProcessor; import org.apache.ignite.internal.processors.localtask.DurableBackgroundTasksProcessor; import org.apache.ignite.internal.processors.marshaller.GridMarshallerMappingProcessor; import org.apache.ignite.internal.processors.metastorage.DistributedMetaStorage; import org.apache.ignite.internal.processors.metric.GridMetricManager; import org.apache.ignite.internal.processors.nodevalidation.DiscoveryNodeValidationProcessor; import org.apache.ignite.internal.processors.odbc.ClientListenerProcessor; import org.apache.ignite.internal.processors.performancestatistics.PerformanceStatisticsProcessor; import org.apache.ignite.internal.processors.platform.PlatformProcessor; import org.apache.ignite.internal.processors.platform.plugin.PlatformPluginProcessor; import org.apache.ignite.internal.processors.plugin.IgnitePluginProcessor; import org.apache.ignite.internal.processors.pool.PoolProcessor; import org.apache.ignite.internal.processors.port.GridPortProcessor; import org.apache.ignite.internal.processors.query.GridQueryProcessor; import org.apache.ignite.internal.processors.query.QueryEngine; import org.apache.ignite.internal.processors.resource.GridResourceProcessor; import org.apache.ignite.internal.processors.rest.IgniteRestProcessor; import org.apache.ignite.internal.processors.schedule.IgniteScheduleProcessorAdapter; import org.apache.ignite.internal.processors.security.IgniteSecurity; import 
org.apache.ignite.internal.processors.segmentation.GridSegmentationProcessor; import org.apache.ignite.internal.processors.service.IgniteServiceProcessor; import org.apache.ignite.internal.processors.session.GridTaskSessionProcessor; import org.apache.ignite.internal.processors.subscription.GridInternalSubscriptionProcessor; import org.apache.ignite.internal.processors.task.GridTaskProcessor; import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor; import org.apache.ignite.internal.processors.tracing.Tracing; import org.apache.ignite.internal.suggestions.GridPerformanceSuggestions; import org.apache.ignite.internal.util.IgniteExceptionRegistry; import org.apache.ignite.internal.util.spring.IgniteSpringHelper; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.internal.worker.WorkersRegistry; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.maintenance.MaintenanceRegistry; import org.apache.ignite.plugin.PluginNotFoundException; import org.apache.ignite.plugin.PluginProvider; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.internal.IgniteComponentType.SPRING;
/**
 * Implementation of kernal context. Holds a typed reference to every kernal
 * manager and processor; components are routed into their fields by add().
 */
@GridToStringExclude public class GridKernalContextImpl implements GridKernalContext, Externalizable {
// Lazily wires the binary marshaller context to the local node's cache object processor.
static { GridBinaryMarshaller.binaryContextSupplier(() -> IgnitionEx.localIgnite().context().cacheObjects().binaryContext()); } /** */ private static final long serialVersionUID = 0L; /** */ private static final ThreadLocal<String> stash = new ThreadLocal<>(); /* * Managers.
* ======== */ /** */ @GridToStringExclude private GridDeploymentManager depMgr; /** */ @GridToStringExclude private GridIoManager ioMgr; /** */ @GridToStringExclude private GridDiscoveryManager discoMgr; /** */ @GridToStringExclude private GridCheckpointManager cpMgr; /** */ @GridToStringExclude private GridEventStorageManager evtMgr; /** */ @GridToStringExclude private GridFailoverManager failoverMgr; /** */ @GridToStringExclude private GridCollisionManager colMgr; /** */ @GridToStringExclude private GridLoadBalancerManager loadMgr; /** */ @GridToStringExclude private IgniteSecurity security; /** */ @GridToStringExclude private GridIndexingManager indexingMgr; /** */ @GridToStringExclude private IndexProcessor indexProc; /** */ @GridToStringExclude private GridEncryptionManager encryptionMgr; /** */ @GridToStringExclude private IgniteDefragmentation defragMgr; /** */ @GridToStringExclude private GridTracingManager tracingMgr; /* * Processors. * ========== */ /** */ @GridToStringInclude private ClientListenerProcessor clientListenerProc; /** */ @GridToStringInclude private GridQueryProcessor qryProc; /** */ @GridToStringInclude private GridTaskProcessor taskProc; /** */ @GridToStringInclude private GridJobProcessor jobProc; /** */ @GridToStringInclude private GridTimeoutProcessor timeProc; /** */ @GridToStringInclude private GridResourceProcessor rsrcProc; /** */ @GridToStringInclude private GridJobMetricsProcessor jobMetricsProc; /** */ @GridToStringInclude private GridMetricManager metricMgr; /** */ @GridToStringInclude private GridSystemViewManager sysViewMgr; /** */ @GridToStringInclude private GridClosureProcessor closProc; /** */ @GridToStringInclude private IgniteServiceProcessor srvcProc; /** */ @GridToStringInclude private GridCacheProcessor cacheProc; /** Cluster state process. */ @GridToStringInclude private GridClusterStateProcessor stateProc; /** Global metastorage.
*/ @GridToStringInclude private DistributedMetaStorage distributedMetastorage; /** Global metastorage. */ @GridToStringInclude private DistributedConfigurationProcessor distributedConfigurationProcessor; /** */ @GridToStringInclude private GridTaskSessionProcessor sesProc; /** */ @GridToStringInclude private GridPortProcessor portProc; /** */ @GridToStringInclude private IgniteScheduleProcessorAdapter scheduleProc; /** */ @GridToStringInclude private IgniteRestProcessor restProc; /** */ @GridToStringInclude private DataStreamProcessor dataLdrProc; /** */ @GridToStringInclude private GridSegmentationProcessor segProc; /** */ @GridToStringInclude private GridAffinityProcessor affProc; /** */ @GridToStringExclude private GridContinuousProcessor contProc; /** */ @GridToStringExclude private PoolProcessor poolProc; /** */ @GridToStringExclude private GridMarshallerMappingProcessor mappingProc; /** */ @GridToStringExclude private IgnitePluginProcessor pluginProc; /** */ @GridToStringExclude private IgniteCacheObjectProcessor cacheObjProc; /** */ @GridToStringExclude private PlatformProcessor platformProc; /** */ @GridToStringExclude private IgniteSpringHelper spring; /** */ @GridToStringExclude private ClusterProcessor cluster; /** */ @GridToStringExclude private CompressionProcessor compressProc; /** */ @GridToStringExclude private DataStructuresProcessor dataStructuresProc; /** Diagnostic processor.
*/ @GridToStringInclude private DiagnosticProcessor diagnosticProcessor; /** */ @GridToStringExclude private MaintenanceProcessor maintenanceProc; /** */ @GridToStringExclude private CacheObjectTransformerProcessor transProc; /** */ @GridToStringExclude private List<GridComponent> comps = new LinkedList<>(); /** */ @GridToStringExclude private Map<String, Object> attrs = new HashMap<>(); /** */ @GridToStringExclude private WorkersRegistry workersRegistry; /** */ @GridToStringExclude private LongJVMPauseDetector pauseDetector; /** */ @GridToStringExclude private DurableBackgroundTasksProcessor durableBackgroundTasksProcessor; /** Performance statistics processor. */ @GridToStringExclude private PerformanceStatisticsProcessor perfStatProc; /** */ private Thread.UncaughtExceptionHandler hnd; /** */ private IgniteEx grid; /** */ private IgniteConfiguration cfg; /** */ private GridKernalGateway gw; /** Network segmented flag. */ private volatile boolean segFlag; /** Performance suggestions. */ private final GridPerformanceSuggestions perf = new GridPerformanceSuggestions(); /** Marshaller context. */ private MarshallerContextImpl marshCtx; /** */ private ClusterNode locNode; /** */ private volatile boolean disconnected; /** PDS mode folder name resolver, also generates consistent ID in case new folder naming is used */ private PdsFoldersResolver pdsFolderRslvr; /** */ private GridInternalSubscriptionProcessor internalSubscriptionProc; /** Failure processor. */ private FailureProcessor failureProc; /** Recovery mode flag. Flag is set to {@code false} when discovery manager started. */ private boolean recoveryMode = true; /** Marshaller. */ private final BinaryMarshaller marsh = new BinaryMarshaller(); /** * No-arg constructor is required by externalization. */ public GridKernalContextImpl() { // No-op.
} /** * Creates new kernal context. * * @param log Logger. * @param grid Grid instance managed by kernal. * @param cfg Grid configuration. * @param gw Kernal gateway.
* @param clsFilter Classname filter handed to the marshaller context.
* @param workerRegistry Workers registry stored on this context.
* @param plugins Plugin providers. * @param hnd Default uncaught exception handler used by thread pools. * @param pauseDetector Long JVM pause detector. */ @SuppressWarnings("TypeMayBeWeakened") protected GridKernalContextImpl( GridLoggerProxy log, IgniteEx grid, IgniteConfiguration cfg, GridKernalGateway gw, List<PluginProvider> plugins, IgnitePredicate<String> clsFilter, WorkersRegistry workerRegistry, Thread.UncaughtExceptionHandler hnd, LongJVMPauseDetector pauseDetector ) { assert grid != null; assert cfg != null; assert gw != null; this.grid = grid; this.cfg = cfg; this.gw = gw; this.workersRegistry = workerRegistry; this.hnd = hnd; this.pauseDetector = pauseDetector; marshCtx = new MarshallerContextImpl(plugins, clsFilter); defragMgr = new IgniteDefragmentationImpl(this); try { spring = SPRING.create(false); } catch (IgniteCheckedException ignored) { if (log != null && log.isDebugEnabled()) log.debug("Failed to load spring component, will not be able to extract userVersion from " + "META-INF/ignite.xml."); } } /** {@inheritDoc} */ @Override public Iterator<GridComponent> iterator() { return comps.iterator(); } /** {@inheritDoc} */ @Override public List<GridComponent> components() { return Collections.unmodifiableList(comps); } /** * @param comp Manager to add. */ public void add(GridComponent comp) { add(comp, true); } /** * @param comp Manager to add. * @param addToList If {@code true} component is added to components list. */ public void add(GridComponent comp, boolean addToList) { assert comp != null; /* * Managers.
* ======== */ if (comp instanceof GridDeploymentManager) depMgr = (GridDeploymentManager)comp; else if (comp instanceof GridIoManager) ioMgr = (GridIoManager)comp; else if (comp instanceof GridDiscoveryManager) discoMgr = (GridDiscoveryManager)comp; else if (comp instanceof GridCheckpointManager) cpMgr = (GridCheckpointManager)comp; else if (comp instanceof GridEventStorageManager) evtMgr = (GridEventStorageManager)comp; else if (comp instanceof GridFailoverManager) failoverMgr = (GridFailoverManager)comp; else if (comp instanceof GridCollisionManager) colMgr = (GridCollisionManager)comp; else if (comp instanceof GridLoadBalancerManager) loadMgr = (GridLoadBalancerManager)comp; else if (comp instanceof GridIndexingManager) indexingMgr = (GridIndexingManager)comp; else if (comp instanceof GridEncryptionManager) encryptionMgr = (GridEncryptionManager)comp; else if (comp instanceof GridTracingManager) tracingMgr = (GridTracingManager)comp; /* * Processors. * ========== */ else if (comp instanceof FailureProcessor) failureProc = (FailureProcessor)comp; else if (comp instanceof GridTaskProcessor) taskProc = (GridTaskProcessor)comp; else if (comp instanceof GridJobProcessor) jobProc = (GridJobProcessor)comp; else if (comp instanceof GridTimeoutProcessor) timeProc = (GridTimeoutProcessor)comp; else if (comp instanceof GridResourceProcessor) rsrcProc = (GridResourceProcessor)comp; else if (comp instanceof GridJobMetricsProcessor) jobMetricsProc = (GridJobMetricsProcessor)comp; else if (comp instanceof GridMetricManager) metricMgr = (GridMetricManager)comp; else if (comp instanceof GridSystemViewManager) sysViewMgr = (GridSystemViewManager)comp; else if (comp instanceof GridCacheProcessor) cacheProc = (GridCacheProcessor)comp; else if (comp instanceof GridClusterStateProcessor) stateProc = (GridClusterStateProcessor)comp; else if (comp instanceof DistributedMetaStorage) distributedMetastorage = (DistributedMetaStorage)comp; else if (comp instanceof
DistributedConfigurationProcessor) distributedConfigurationProcessor = (DistributedConfigurationProcessor)comp; else if (comp instanceof GridTaskSessionProcessor) sesProc = (GridTaskSessionProcessor)comp; else if (comp instanceof GridPortProcessor) portProc = (GridPortProcessor)comp; else if (comp instanceof GridClosureProcessor) closProc = (GridClosureProcessor)comp; else if (comp instanceof IgniteServiceProcessor) srvcProc = (IgniteServiceProcessor)comp; else if (comp instanceof IgniteScheduleProcessorAdapter) scheduleProc = (IgniteScheduleProcessorAdapter)comp; else if (comp instanceof GridSegmentationProcessor) segProc = (GridSegmentationProcessor)comp; else if (comp instanceof GridAffinityProcessor) affProc = (GridAffinityProcessor)comp; else if (comp instanceof IgniteRestProcessor) restProc = (IgniteRestProcessor)comp; else if (comp instanceof DataStreamProcessor) dataLdrProc = (DataStreamProcessor)comp; else if (comp instanceof GridContinuousProcessor) contProc = (GridContinuousProcessor)comp; else if (comp instanceof IgniteCacheObjectProcessor) cacheObjProc = (IgniteCacheObjectProcessor)comp; else if (comp instanceof IgnitePluginProcessor) pluginProc = (IgnitePluginProcessor)comp; else if (comp instanceof GridQueryProcessor) qryProc = (GridQueryProcessor)comp; else if (comp instanceof ClientListenerProcessor) clientListenerProc = (ClientListenerProcessor)comp; else if (comp instanceof DataStructuresProcessor) dataStructuresProc = (DataStructuresProcessor)comp; else if (comp instanceof ClusterProcessor) cluster = (ClusterProcessor)comp; else if (comp instanceof PlatformProcessor) platformProc = (PlatformProcessor)comp; else if (comp instanceof PoolProcessor) poolProc = (PoolProcessor)comp; else if (comp instanceof GridMarshallerMappingProcessor) mappingProc = (GridMarshallerMappingProcessor)comp; else if (comp instanceof PdsFoldersResolver) pdsFolderRslvr = (PdsFoldersResolver)comp; else if (comp instanceof GridInternalSubscriptionProcessor)
// Remaining processors; anything unrecognized must be a GridPluginComponent (asserted below).
internalSubscriptionProc = (GridInternalSubscriptionProcessor)comp; else if (comp instanceof IgniteSecurity) security = (IgniteSecurity)comp; else if (comp instanceof CompressionProcessor) compressProc = (CompressionProcessor)comp; else if (comp instanceof DiagnosticProcessor) diagnosticProcessor = (DiagnosticProcessor)comp; else if (comp instanceof DurableBackgroundTasksProcessor) durableBackgroundTasksProcessor = (DurableBackgroundTasksProcessor)comp; else if (comp instanceof MaintenanceProcessor) maintenanceProc = (MaintenanceProcessor)comp; else if (comp instanceof CacheObjectTransformerProcessor) transProc = (CacheObjectTransformerProcessor)comp; else if (comp instanceof PerformanceStatisticsProcessor) perfStatProc = (PerformanceStatisticsProcessor)comp; else if (comp instanceof IndexProcessor) indexProc = (IndexProcessor)comp; else if (!(comp instanceof DiscoveryNodeValidationProcessor || comp instanceof PlatformPluginProcessor || comp instanceof QueryEngine)) assert (comp instanceof GridPluginComponent) : "Unknown manager class: " + comp.getClass(); if (addToList) comps.add(comp); } /** * @param helper Helper to add. */ public void addHelper(Object helper) { assert helper != null; assert false : "Unknown helper class: " + helper.getClass(); } /** {@inheritDoc} */ @Override public boolean isStopping() { return ((IgniteKernal)grid).isStopping(); } /** */ @Nullable private ClusterNode localNode() { if (locNode == null && discoMgr != null) locNode = discoMgr.localNode(); return locNode; } /** {@inheritDoc} */ @Override public UUID localNodeId() { ClusterNode locNode0 = localNode(); return locNode0 != null ?
// (expression continues on the following line)
locNode0.id() : config().getNodeId(); } /** {@inheritDoc} */ @Override public String igniteInstanceName() { return cfg.getIgniteInstanceName(); } /** {@inheritDoc} */ @Override public GridKernalGateway gateway() { return gw; } /** {@inheritDoc} */ @Override public IgniteEx grid() { return grid; } /** {@inheritDoc} */ @Override public IgniteConfiguration config() { return cfg; } /** {@inheritDoc} */ @Override public GridTaskProcessor task() { return taskProc; } /** {@inheritDoc} */ @Override public GridJobProcessor job() { return jobProc; } /** {@inheritDoc} */ @Override public GridTimeoutProcessor timeout() { return timeProc; } /** {@inheritDoc} */ @Override public GridResourceProcessor resource() { return rsrcProc; } /** {@inheritDoc} */ @Override public GridJobMetricsProcessor jobMetric() { return jobMetricsProc; } /** {@inheritDoc} */ @Override public GridMetricManager metric() { return metricMgr; } /** {@inheritDoc} */ @Override public GridSystemViewManager systemView() { return sysViewMgr; } /** {@inheritDoc} */ @Override public MaintenanceRegistry maintenanceRegistry() { return maintenanceProc; } /** {@inheritDoc} */ @Override public CacheObjectTransformerProcessor transformer() { return transProc; } /** {@inheritDoc} */ @Override public GridCacheProcessor cache() { return cacheProc; } /** {@inheritDoc} */ @Override public GridClusterStateProcessor state() { return stateProc; } /** {@inheritDoc} */ @Override public DistributedMetaStorage distributedMetastorage() { return distributedMetastorage; } /** {@inheritDoc} */ @Override public DistributedConfigurationProcessor distributedConfiguration() { return distributedConfigurationProcessor; } /** {@inheritDoc} */ @Override public Tracing tracing() { return tracingMgr; } /** {@inheritDoc} */ @Override public GridTaskSessionProcessor session() { return sesProc; } /** {@inheritDoc} */ @Override public GridClosureProcessor closure() { return closProc; } /** {@inheritDoc} */ @Override public IgniteServiceProcessor 
service() { return srvcProc; } /** {@inheritDoc} */ @Override public GridPortProcessor ports() { return portProc; } /** {@inheritDoc} */ @Override public IgniteScheduleProcessorAdapter schedule() { return scheduleProc; } /** {@inheritDoc} */ @Override public GridDeploymentManager deploy() { return depMgr; } /** {@inheritDoc} */ @Override public GridIoManager io() { return ioMgr; } /** {@inheritDoc} */ @Override public GridDiscoveryManager discovery() { return discoMgr; } /** {@inheritDoc} */ @Override public GridCheckpointManager checkpoint() { return cpMgr; } /** {@inheritDoc} */ @Override public GridEventStorageManager event() { return evtMgr; } /** {@inheritDoc} */ @Override public GridFailoverManager failover() { return failoverMgr; } /** {@inheritDoc} */ @Override public GridCollisionManager collision() { return colMgr; } /** {@inheritDoc} */ @Override public IgniteSecurity security() { return security; } /** {@inheritDoc} */ @Override public GridLoadBalancerManager loadBalancing() { return loadMgr; } /** {@inheritDoc} */ @Override public GridIndexingManager indexing() { return indexingMgr; } /** {@inheritDoc} */ @Override public IndexProcessor indexProcessor() { return indexProc; } /** {@inheritDoc} */ @Override public GridEncryptionManager encryption() { return encryptionMgr; } /** {@inheritDoc} */ @Override public IgniteDefragmentation defragmentation() { return defragMgr; } /** {@inheritDoc} */ @Override public WorkersRegistry workersRegistry() { return workersRegistry; } /** {@inheritDoc} */ @Override public GridAffinityProcessor affinity() { return affProc; } /** {@inheritDoc} */ @Override public IgniteRestProcessor rest() { return restProc; } /** {@inheritDoc} */ @Override public GridSegmentationProcessor segmentation() { return segProc; } /** {@inheritDoc} */ @Override public <K, V> DataStreamProcessor<K, V> dataStream() { return (DataStreamProcessor<K, V>)dataLdrProc; } /** {@inheritDoc} */ @Override public GridContinuousProcessor continuous() { 
return contProc; } /** {@inheritDoc} */ @Override public PoolProcessor pools() { return poolProc; } /** {@inheritDoc} */ @Override public GridMarshallerMappingProcessor mapping() { return mappingProc; } /** {@inheritDoc} */ @Override public IgniteCacheObjectProcessor cacheObjects() { return cacheObjProc; } /** {@inheritDoc} */ @Override public GridQueryProcessor query() { return qryProc; } /** {@inheritDoc} */ @Override public ClientListenerProcessor clientListener() { return clientListenerProc; } /** {@inheritDoc} */ @Override public DataStructuresProcessor dataStructures() { return dataStructuresProc; } /** {@inheritDoc} */ @Override public IgniteLogger log(String ctgr) { return config().getGridLogger().getLogger(ctgr); } /** {@inheritDoc} */ @Override public IgniteLogger log(Class<?> cls) { return log(cls.getName()); } /** {@inheritDoc} */ @Override public GridPerformanceSuggestions performance() { return perf; } /** {@inheritDoc} */ @Override public LongJVMPauseDetector longJvmPauseDetector() { return pauseDetector; } /** {@inheritDoc} */ @Override public DiagnosticProcessor diagnostic() { return diagnosticProcessor; } /** {@inheritDoc} */ @Override public void printMemoryStats() { X.println(">>> "); X.println(">>> Grid memory stats [igniteInstanceName=" + igniteInstanceName() + ']'); for (GridComponent comp : comps) comp.printMemoryStats(); } /** {@inheritDoc} */ @Override public String userVersion(ClassLoader ldr) { return spring != null ? 
spring.userVersion(ldr, log(spring.getClass())) : U.DFLT_USER_VERSION; } /** {@inheritDoc} */ @Override public PluginProvider pluginProvider(String name) throws PluginNotFoundException { PluginProvider plugin = pluginProc.pluginProvider(name); if (plugin == null) throw new PluginNotFoundException(name); return plugin; } /** {@inheritDoc} */ @Nullable @Override public <T> T createComponent(Class<T> cls) { T res = pluginProc.createComponent(cls); if (res != null) return res; if (cls.equals(IgniteCacheObjectProcessor.class)) return (T)new CacheObjectBinaryProcessorImpl(this); if (cls.equals(CacheConflictResolutionManager.class)) return null; throw new IgniteException("Unsupported component type: " + cls); } /** * @return Plugin manager. */ @Override public IgnitePluginProcessor plugins() { return pluginProc; } /** {@inheritDoc} */ @Override public void writeExternal(ObjectOutput out) throws IOException { U.writeString(out, grid.name()); } /** {@inheritDoc} */ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { U.readString(in); // Read for compatibility only. See #readResolve(). } /** * Reconstructs object on unmarshalling. * * @return Reconstructed object. * @throws ObjectStreamException Thrown in case of unmarshalling error. 
*/ protected Object readResolve() throws ObjectStreamException { try { return IgnitionEx.localIgnite().context(); } catch (IllegalStateException e) { throw U.withCause(new InvalidObjectException(e.getMessage()), e); } finally { stash.remove(); } } /** {@inheritDoc} */ @Override public IgniteExceptionRegistry exceptionRegistry() { return IgniteExceptionRegistry.get(); } /** {@inheritDoc} */ @Override public Object nodeAttribute(String key) { return attrs.get(key); } /** {@inheritDoc} */ @Override public boolean hasNodeAttribute(String key) { return attrs.containsKey(key); } /** {@inheritDoc} */ @Override public Object addNodeAttribute(String key, Object val) { return attrs.put(key, val); } /** {@inheritDoc} */ @Override public Map<String, Object> nodeAttributes() { return attrs; } /** {@inheritDoc} */ @Override public ClusterProcessor cluster() { return cluster; } /** {@inheritDoc} */ @Override public MarshallerContextImpl marshallerContext() { return marshCtx; } /** {@inheritDoc} */ @Override public boolean clientNode() { return cfg.isClientMode(); } /** {@inheritDoc} */ @Override public boolean clientDisconnected() { ClusterNode locNode0 = localNode(); return locNode0 != null ? (locNode0.isClient() && disconnected) : false; } /** {@inheritDoc} */ @Override public PlatformProcessor platform() { return platformProc; } /** {@inheritDoc} */ @Override public GridInternalSubscriptionProcessor internalSubscriptionProcessor() { return internalSubscriptionProc; } /** * @param disconnected Disconnected flag. 
*/ void disconnected(boolean disconnected) { this.disconnected = disconnected; } /** {@inheritDoc} */ @Override public PdsFoldersResolver pdsFolderResolver() { return pdsFolderRslvr; } /** {@inheritDoc} */ @Override public boolean invalid() { FailureProcessor failureProc = failure(); return failureProc != null && failureProc.failureContext() != null && failureProc.failureContext().type() != FailureType.SEGMENTATION; } /** {@inheritDoc} */ @Override public boolean segmented() { FailureProcessor failureProc = failure(); return failureProc != null && failureProc.failureContext() != null && failureProc.failureContext().type() == FailureType.SEGMENTATION; } /** {@inheritDoc} */ @Override public FailureProcessor failure() { return failureProc; } /** {@inheritDoc} */ @Override public Thread.UncaughtExceptionHandler uncaughtExceptionHandler() { return hnd; } /** {@inheritDoc} */ @Override public CompressionProcessor compress() { return compressProc; } /** {@inheritDoc} */ @Override public boolean recoveryMode() { return recoveryMode; } /** * @param recoveryMode Recovery mode. */ public void recoveryMode(boolean recoveryMode) { this.recoveryMode = recoveryMode; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridKernalContextImpl.class, this); } /** {@inheritDoc} */ @Override public DurableBackgroundTasksProcessor durableBackgroundTask() { return durableBackgroundTasksProcessor; } /** {@inheritDoc} */ @Override public PerformanceStatisticsProcessor performanceStatistics() { return perfStatProc; } /** {@inheritDoc} */ @Override public Executor getAsyncContinuationExecutor() { return config().getAsyncContinuationExecutor() == null ? ForkJoinPool.commonPool() : config().getAsyncContinuationExecutor(); } /** {@inheritDoc} */ @Override public BinaryMarshaller marshaller() { return marsh; } }
apache/flink
35,218
flink-table/flink-table-api-java-bridge/src/test/java/org/apache/flink/table/factories/DataGenTableSourceFactoryTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.factories; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.connector.datagen.table.DataGenConnectorOptions; import org.apache.flink.connector.datagen.table.DataGenConnectorOptionsUtil; import org.apache.flink.connector.datagen.table.DataGenTableSource; import org.apache.flink.connector.datagen.table.DataGenTableSourceFactory; import org.apache.flink.connector.datagen.table.RandomGeneratorVisitor; import org.apache.flink.legacy.table.connector.source.SourceFunctionProvider; import org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSource; import org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSourceTest; import org.apache.flink.streaming.api.functions.source.legacy.SourceFunction; import org.apache.flink.streaming.api.operators.StreamSource; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.catalog.Column; import org.apache.flink.table.catalog.ResolvedSchema; import 
org.apache.flink.table.connector.source.DynamicTableSource; import org.apache.flink.table.connector.source.ScanTableSource; import org.apache.flink.table.data.GenericRowData; import org.apache.flink.table.data.RowData; import org.apache.flink.table.descriptors.DescriptorProperties; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.util.InstantiationUtil; import org.junit.jupiter.api.Test; import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import static org.apache.flink.connector.datagen.table.RandomGeneratorVisitor.RANDOM_COLLECTION_LENGTH_DEFAULT; import static org.apache.flink.core.testutils.FlinkAssertions.anyCauseMatches; import static org.apache.flink.table.factories.utils.FactoryMocks.createTableSource; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for {@link DataGenTableSourceFactory}. 
*/ class DataGenTableSourceFactoryTest { private static final ResolvedSchema SCHEMA = ResolvedSchema.of( Column.physical("f0", DataTypes.STRING()), Column.physical("f1", DataTypes.BIGINT()), Column.physical("f2", DataTypes.BIGINT()), Column.physical("f3", DataTypes.TIMESTAMP()), Column.physical("f4", DataTypes.BINARY(2)), Column.physical("f5", DataTypes.VARBINARY(4)), Column.physical("f6", DataTypes.MAP(DataTypes.INT(), DataTypes.STRING())), Column.physical("f7", DataTypes.STRING())); private static final ResolvedSchema LENGTH_CONSTRAINED_SCHEMA = ResolvedSchema.of( Column.physical("f0", DataTypes.CHAR(50)), Column.physical("f1", DataTypes.BINARY(40)), Column.physical("f2", DataTypes.VARCHAR(30)), Column.physical("f3", DataTypes.VARBINARY(20)), Column.physical("f4", DataTypes.STRING())); private static final ResolvedSchema COLLECTION_SCHEMA = ResolvedSchema.of( Column.physical("f0", DataTypes.ARRAY(DataTypes.STRING())), Column.physical("f1", DataTypes.MAP(DataTypes.STRING(), DataTypes.INT())), Column.physical("f2", DataTypes.MULTISET(DataTypes.INT()))); @Test void testDataTypeCoverage() throws Exception { ResolvedSchema schema = ResolvedSchema.of( Column.physical("f0", DataTypes.CHAR(1)), Column.physical("f1", DataTypes.VARCHAR(10)), Column.physical("f2", DataTypes.STRING()), Column.physical("f3", DataTypes.BOOLEAN()), Column.physical("f4", DataTypes.DECIMAL(32, 2)), Column.physical("f5", DataTypes.TINYINT()), Column.physical("f6", DataTypes.SMALLINT()), Column.physical("f7", DataTypes.INT()), Column.physical("f8", DataTypes.BIGINT()), Column.physical("f9", DataTypes.FLOAT()), Column.physical("f10", DataTypes.DOUBLE()), Column.physical("f11", DataTypes.DATE()), Column.physical("f12", DataTypes.TIME()), Column.physical("f13", DataTypes.TIMESTAMP()), Column.physical("f14", DataTypes.TIMESTAMP_WITH_TIME_ZONE()), Column.physical("f15", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE()), Column.physical("f16", DataTypes.INTERVAL(DataTypes.DAY())), Column.physical("f17", 
DataTypes.ARRAY(DataTypes.INT())), Column.physical("f18", DataTypes.MAP(DataTypes.STRING(), DataTypes.DATE())), Column.physical("f19", DataTypes.MULTISET(DataTypes.DECIMAL(32, 2))), Column.physical( "f20", DataTypes.ROW( DataTypes.FIELD("a", DataTypes.BIGINT()), DataTypes.FIELD("b", DataTypes.TIME()), DataTypes.FIELD( "c", DataTypes.ROW( DataTypes.FIELD( "d", DataTypes.TIMESTAMP()))))), Column.physical("f21", DataTypes.BINARY(2)), Column.physical("f22", DataTypes.BYTES()), Column.physical("f23", DataTypes.VARBINARY(4))); DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putString(DataGenConnectorOptions.NUMBER_OF_ROWS.key(), "10"); // add min max option for numeric types descriptor.putString("fields.f4.min", "1.0"); descriptor.putString("fields.f4.max", "1000.0"); descriptor.putString("fields.f5.min", "0"); descriptor.putString("fields.f5.max", "127"); descriptor.putString("fields.f6.min", "0"); descriptor.putString("fields.f6.max", "32767"); descriptor.putString("fields.f7.min", "0"); descriptor.putString("fields.f7.max", "65535"); descriptor.putString("fields.f8.min", "0"); descriptor.putString("fields.f8.max", String.valueOf(Long.MAX_VALUE)); descriptor.putString("fields.f9.min", "0"); descriptor.putString("fields.f9.max", String.valueOf(Float.MAX_VALUE)); descriptor.putString("fields.f10.min", "0"); descriptor.putString("fields.f10.max", String.valueOf(Double.MAX_VALUE)); List<RowData> results = runGenerator(schema, descriptor); assertThat(results).as("Failed to generate all rows").hasSize(10); for (RowData row : results) { for (int i = 0; i < row.getArity(); i++) { assertThat(row.isNullAt(i)) .as("Column " + schema.getColumnNames().get(i) + " should not be null") .isFalse(); } } } @Test void testSource() throws Exception { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); 
descriptor.putLong(DataGenConnectorOptions.ROWS_PER_SECOND.key(), 100); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.RANDOM); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.LENGTH, 20); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f1." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.RANDOM); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f1." + DataGenConnectorOptionsUtil.MIN, 10); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f1." + DataGenConnectorOptionsUtil.MAX, 100); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f2." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f2." + DataGenConnectorOptionsUtil.START, 50); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f2." + DataGenConnectorOptionsUtil.END, 60); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f3." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.RANDOM); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f3." + DataGenConnectorOptionsUtil.MAX_PAST, "5s"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f5." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f5." + DataGenConnectorOptionsUtil.START, 1); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f5." + DataGenConnectorOptionsUtil.END, 11); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f6.key." + DataGenConnectorOptionsUtil.NULL_RATE, "1"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f7." 
+ DataGenConnectorOptionsUtil.NULL_RATE, "1"); final long begin = System.currentTimeMillis(); List<RowData> results = runGenerator(SCHEMA, descriptor); final long end = System.currentTimeMillis(); assertThat(results).hasSize(11); for (int i = 0; i < results.size(); i++) { RowData row = results.get(i); assertThat(row.getString(0).toString()).hasSize(20); assertThat(row.getLong(1)).isBetween(10L, 100L); assertThat(row.getLong(2)).isEqualTo(i + 50); assertThat(row.getTimestamp(3, 3).getMillisecond()).isBetween(begin - 5000, end); assertThat(row.getBinary(4)).hasSize(2); // f5 is sequence bytes produced in sequence long [1, 11] assertThat(row.getBinary(5)).hasSize(8); assertThat(row.getBinary(5)[row.getBinary(5).length - 1]).isEqualTo((byte) (i + 1)); assertThat(row.getMap(6).keyArray().isNullAt(0)).isTrue(); assertThat(row.getString(7)).isNull(); } } @Test void testVariableLengthDataGeneration() throws Exception { DescriptorProperties descriptor = new DescriptorProperties(); final int rowsNumber = 999; descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putLong(DataGenConnectorOptions.NUMBER_OF_ROWS.key(), rowsNumber); ResolvedSchema schema = ResolvedSchema.of( Column.physical("f0", DataTypes.STRING()), Column.physical("f1", DataTypes.VARCHAR(20)), Column.physical("f2", DataTypes.BYTES()), Column.physical("f3", DataTypes.VARBINARY(4)), Column.physical("f4", DataTypes.BINARY(2))); List<RowData> results = runGenerator(schema, descriptor); assertThat(results).hasSize(rowsNumber); for (RowData row : results) { assertThat(row.getString(0).toString()) .hasSize(RandomGeneratorVisitor.RANDOM_STRING_LENGTH_DEFAULT); assertThat(row.getString(1).toString()).hasSize(20); assertThat(row.getBinary(2)) .hasSize(RandomGeneratorVisitor.RANDOM_BYTES_LENGTH_DEFAULT); assertThat(row.getBinary(3)).hasSize(4); } descriptor.putBoolean( DataGenConnectorOptionsUtil.FIELDS + ".f0." 
+ DataGenConnectorOptionsUtil.VAR_LEN, true); descriptor.putBoolean( DataGenConnectorOptionsUtil.FIELDS + ".f1." + DataGenConnectorOptionsUtil.VAR_LEN, true); descriptor.putBoolean( DataGenConnectorOptionsUtil.FIELDS + ".f2." + DataGenConnectorOptionsUtil.VAR_LEN, true); descriptor.putBoolean( DataGenConnectorOptionsUtil.FIELDS + ".f3." + DataGenConnectorOptionsUtil.VAR_LEN, true); results = runGenerator(schema, descriptor); Set<Integer> sizeString = new HashSet<>(); Set<Integer> sizeVarChar = new HashSet<>(); Set<Integer> sizeBytes = new HashSet<>(); Set<Integer> sizeVarBinary = new HashSet<>(); for (RowData row : results) { assertThat(row.getString(0).toString()) .hasSizeBetween(1, RandomGeneratorVisitor.RANDOM_STRING_LENGTH_DEFAULT); assertThat(row.getString(1).toString()) .hasSizeBetween(1, RandomGeneratorVisitor.RANDOM_STRING_LENGTH_DEFAULT); assertThat(row.getBinary(2)) .hasSizeBetween(1, RandomGeneratorVisitor.RANDOM_BYTES_LENGTH_DEFAULT); assertThat(row.getBinary(3)) .hasSizeBetween(1, RandomGeneratorVisitor.RANDOM_BYTES_LENGTH_DEFAULT); sizeString.add(row.getString(0).toString().length()); sizeVarChar.add(row.getString(1).toString().length()); sizeBytes.add(row.getBinary(2).length); sizeVarBinary.add(row.getBinary(3).length); } assertThat(sizeString.size()).isGreaterThan(1); assertThat(sizeBytes.size()).isGreaterThan(1); assertThat(sizeVarBinary.size()).isGreaterThan(1); assertThat(sizeVarChar.size()).isGreaterThan(1); assertException( schema, descriptor, "f4", null, true, String.format( "Only supports specifying '%s' option for variable-length types (VARCHAR/STRING/VARBINARY/BYTES). 
The type of field '%s' is not within this range.", DataGenConnectorOptions.FIELD_VAR_LEN.key(), "f4")); } @Test void testVariableLengthDataType() throws Exception { DescriptorProperties descriptor = new DescriptorProperties(); final int rowsNumber = 200; descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putLong(DataGenConnectorOptions.NUMBER_OF_ROWS.key(), rowsNumber); List<RowData> results = runGenerator(LENGTH_CONSTRAINED_SCHEMA, descriptor); assertThat(results).hasSize(rowsNumber); for (RowData row : results) { assertThat(row.getString(2).toString()).hasSize(30); assertThat(row.getBinary(3)).hasSize(20); assertThat(row.getString(4).toString()) .hasSize(RandomGeneratorVisitor.RANDOM_STRING_LENGTH_DEFAULT); } descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f2." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.RANDOM); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f2." + DataGenConnectorOptionsUtil.LENGTH, 25); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f4." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.RANDOM); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f4." 
+ DataGenConnectorOptionsUtil.LENGTH, 9999); results = runGenerator(LENGTH_CONSTRAINED_SCHEMA, descriptor); for (RowData row : results) { assertThat(row.getString(2).toString()).hasSize(25); assertThat(row.getString(4).toString()).hasSize(9999); } assertException( LENGTH_CONSTRAINED_SCHEMA, descriptor, "f3", 21, null, "Custom length '21' for variable-length type (VARCHAR/STRING/VARBINARY/BYTES) field 'f3' should be shorter than '20' defined in the schema."); } @Test void testLengthForCollectionType() throws Exception { DescriptorProperties descriptor = new DescriptorProperties(); final int rowsNumber = 200; final int collectionSize = 10; descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putLong(DataGenConnectorOptions.NUMBER_OF_ROWS.key(), rowsNumber); // test for default length. List<RowData> results = runGenerator(COLLECTION_SCHEMA, descriptor); assertThat(results).hasSize(rowsNumber); for (RowData row : results) { assertThat(row.getArray(0).size()).isEqualTo(RANDOM_COLLECTION_LENGTH_DEFAULT); assertThat(row.getMap(1).size()) .isEqualTo(RandomGeneratorVisitor.RANDOM_COLLECTION_LENGTH_DEFAULT); assertThat(row.getMap(2).size()) .isEqualTo(RandomGeneratorVisitor.RANDOM_COLLECTION_LENGTH_DEFAULT); } // test for provided length. descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.LENGTH, collectionSize); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f1." + DataGenConnectorOptionsUtil.LENGTH, collectionSize); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f2." 
+ DataGenConnectorOptionsUtil.LENGTH, collectionSize); results = runGenerator(COLLECTION_SCHEMA, descriptor); assertThat(results).hasSize(rowsNumber); for (RowData row : results) { assertThat(row.getArray(0).size()).isEqualTo(collectionSize); assertThat(row.getMap(1).size()).isEqualTo(collectionSize); assertThat(row.getMap(2).size()).isEqualTo(collectionSize); } } @Test void testFixedLengthDataType() throws Exception { DescriptorProperties descriptor = new DescriptorProperties(); final int rowsNumber = 200; descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putLong(DataGenConnectorOptions.NUMBER_OF_ROWS.key(), rowsNumber); List<RowData> results = runGenerator(LENGTH_CONSTRAINED_SCHEMA, descriptor); assertThat(results).hasSize(rowsNumber); for (RowData row : results) { assertThat(row.getString(0).toString()).hasSize(50); assertThat(row.getBinary(1)).hasSize(40); } assertException( LENGTH_CONSTRAINED_SCHEMA, descriptor, "f0", 20, null, "Custom length for fixed-length type (CHAR/BINARY) field 'f0' is not supported."); } private List<RowData> runGenerator(ResolvedSchema schema, DescriptorProperties descriptor) throws Exception { DynamicTableSource source = createTableSource(schema, descriptor.asMap()); assertThat(source).isInstanceOf(DataGenTableSource.class); DataGenTableSource dataGenTableSource = (DataGenTableSource) source; DataGeneratorSource<RowData> gen = dataGenTableSource.createSource(); // test java serialization. 
gen = InstantiationUtil.clone(gen); StreamSource<RowData, DataGeneratorSource<RowData>> src = new StreamSource<>(gen); AbstractStreamOperatorTestHarness<RowData> testHarness = new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0); testHarness.open(); TestContext ctx = new TestContext(); gen.run(ctx); return ctx.results; } @Test void testSequenceCheckpointRestore() throws Exception { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.START, 0); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.END, 100); DynamicTableSource dynamicTableSource = createTableSource( ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap()); DataGenTableSource dataGenTableSource = (DataGenTableSource) dynamicTableSource; DataGeneratorSource<RowData> source = dataGenTableSource.createSource(); final int initElement = 0; final int maxElement = 100; final Set<RowData> expectedOutput = new HashSet<>(); for (long i = initElement; i <= maxElement; i++) { expectedOutput.add(GenericRowData.of(i)); } DataGeneratorSourceTest.innerTestDataGenCheckpointRestore( () -> { try { return InstantiationUtil.clone(source); } catch (IOException | ClassNotFoundException e) { throw new RuntimeException(e); } }, expectedOutput); } @Test void testLackStartForSequence() { assertThatThrownBy( () -> { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f0." 
+ DataGenConnectorOptionsUtil.END, 100); createTableSource( ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap()); }) .satisfies( anyCauseMatches( ValidationException.class, "Could not find required property 'fields.f0.start' for sequence generator.")); } @Test void testLackEndForSequence() { assertThatThrownBy( () -> { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.START, 0); createTableSource( ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap()); }) .satisfies( anyCauseMatches( ValidationException.class, "Could not find required property 'fields.f0.end' for sequence generator.")); } @Test void testWrongKey() { assertThatThrownBy( () -> { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putLong("wrong-rows-per-second", 1); createTableSource( ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap()); }) .satisfies( anyCauseMatches( ValidationException.class, "Unsupported options:\n\nwrong-rows-per-second")); } @Test void testWrongStartInRandom() { assertThatThrownBy( () -> { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.RANDOM); descriptor.putLong( DataGenConnectorOptionsUtil.FIELDS + ".f0." 
+ DataGenConnectorOptionsUtil.START, 0); createTableSource( ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap()); }) .satisfies( anyCauseMatches( ValidationException.class, "Unsupported options:\n\nfields.f0.start")); } @Test void testWrongLenInRandomLong() { assertThatThrownBy( () -> { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.RANDOM); descriptor.putInt( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.LENGTH, 100); createTableSource( ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap()); }) .satisfies( anyCauseMatches( ValidationException.class, "Unsupported options:\n\nfields.f0.length")); } @Test void testWrongTypes() { assertThatThrownBy( () -> { DescriptorProperties descriptor = new DescriptorProperties(); descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.START, "Wrong"); descriptor.putString( DataGenConnectorOptionsUtil.FIELDS + ".f0." 
+ DataGenConnectorOptionsUtil.END, "Wrong"); createTableSource( ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap()); }) .satisfies( anyCauseMatches("Could not parse value 'Wrong' for key 'fields.f0.start'")); } @Test void testWithParallelism() { ResolvedSchema schema = ResolvedSchema.of(Column.physical("f0", DataTypes.CHAR(1))); Map<String, String> options = new HashMap<>(); options.put(FactoryUtil.CONNECTOR.key(), "datagen"); options.put(DataGenConnectorOptions.SOURCE_PARALLELISM.key(), "10"); DynamicTableSource source = createTableSource(schema, options); assertThat(source).isInstanceOf(DataGenTableSource.class); DataGenTableSource dataGenTableSource = (DataGenTableSource) source; ScanTableSource.ScanRuntimeProvider scanRuntimeProvider = dataGenTableSource.getScanRuntimeProvider(new TestScanContext()); assertThat(scanRuntimeProvider).isInstanceOf(SourceFunctionProvider.class); SourceFunctionProvider sourceFunctionProvider = (SourceFunctionProvider) scanRuntimeProvider; assertThat(sourceFunctionProvider.getParallelism()).hasValue(10); } private void assertException( ResolvedSchema schema, DescriptorProperties descriptor, String fieldName, @Nullable Integer len, @Nullable Boolean varLen, String expectedMessage) { assertThatThrownBy( () -> { descriptor.putString( String.join( ".", DataGenConnectorOptionsUtil.FIELDS, fieldName, DataGenConnectorOptionsUtil.KIND), DataGenConnectorOptionsUtil.RANDOM); if (len != null) { descriptor.putLong( String.join( ".", DataGenConnectorOptionsUtil.FIELDS, fieldName, DataGenConnectorOptionsUtil.LENGTH), len); } if (varLen != null) { descriptor.putBoolean( String.join( ".", DataGenConnectorOptionsUtil.FIELDS, fieldName, DataGenConnectorOptionsUtil.VAR_LEN), varLen); } runGenerator(schema, descriptor); }) .satisfies(anyCauseMatches(ValidationException.class, expectedMessage)); } private static class TestContext implements SourceFunction.SourceContext<RowData> { private final Object lock = new Object(); 
private final List<RowData> results = new ArrayList<>(); @Override public void collect(RowData element) { results.add(element); } @Override public void collectWithTimestamp(RowData element, long timestamp) {} @Override public void emitWatermark(Watermark mark) {} @Override public void markAsTemporarilyIdle() {} @Override public Object getCheckpointLock() { return lock; } @Override public void close() {} } private static class TestScanContext implements ScanTableSource.ScanContext { @Override public <T> TypeInformation<T> createTypeInformation(DataType producedDataType) { return null; } @Override public <T> TypeInformation<T> createTypeInformation(LogicalType producedLogicalType) { return null; } @Override public DynamicTableSource.DataStructureConverter createDataStructureConverter( DataType producedDataType) { return null; } } }
apache/paimon
34,579
paimon-flink/paimon-flink-common/src/test/java/org/apache/paimon/flink/iceberg/FlinkIcebergITCaseBase.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.paimon.flink.iceberg; import org.apache.paimon.flink.util.AbstractTestBase; import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.TableResult; import org.apache.flink.types.Row; import org.apache.flink.util.CloseableIterator; import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.catalog.TableIdentifier; import org.apache.iceberg.data.IcebergGenerics; import org.apache.iceberg.data.Record; import org.apache.iceberg.expressions.Expressions; import org.apache.iceberg.hadoop.HadoopCatalog; import org.apache.iceberg.io.CloseableIterable; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; import java.util.ArrayList; import java.util.List; import static org.assertj.core.api.Assertions.assertThat; /** IT cases for Paimon Iceberg compatibility. 
*/ public abstract class FlinkIcebergITCaseBase extends AbstractTestBase { @ParameterizedTest @ValueSource(strings = {"orc", "parquet", "avro"}) public void testPrimaryKeyTable(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.T (\n" + " pt INT,\n" + " k INT,\n" + " v1 INT,\n" + " v2 STRING,\n" + " PRIMARY KEY (pt, k) NOT ENFORCED\n" + ") PARTITIONED BY (pt) WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" // make sure all changes are visible in iceberg metadata + " 'full-compaction.delta-commits' = '1',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 100, 'apple'), " + "(1, 11, 110, 'banana'), " + "(2, 20, 200, 'cat'), " + "(2, 21, 210, 'dog')") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); assertThat( collect( tEnv.executeSql( "SELECT v1, k, v2, pt FROM iceberg.`default`.T ORDER BY pt, k"))) .containsExactly( Row.of(100, 10, "apple", 1), Row.of(110, 11, "banana", 1), Row.of(200, 20, "cat", 2), Row.of(210, 21, "dog", 2)); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 101, 'red'), " + "(1, 12, 121, 'green'), " + "(2, 20, 201, 'blue'), " + "(2, 22, 221, 'yellow')") .await(); assertThat( collect( tEnv.executeSql( "SELECT v1, k, v2, pt FROM iceberg.`default`.T ORDER BY pt, k"))) .containsExactly( Row.of(101, 10, "red", 1), Row.of(110, 11, "banana", 1), Row.of(121, 12, "green", 1), Row.of(201, 20, "blue", 2), Row.of(210, 21, "dog", 2), Row.of(221, 22, "yellow", 2)); } @ParameterizedTest @ValueSource(strings = {"orc", 
"parquet", "avro"}) public void testAppendOnlyTable(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.cities (\n" + " country STRING,\n" + " name STRING\n" + ") WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.cities VALUES " + "('usa', 'new york'), " + "('germany', 'berlin'), " + "('usa', 'chicago'), " + "('germany', 'hamburg')") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); assertThat(collect(tEnv.executeSql("SELECT name, country FROM iceberg.`default`.cities"))) .containsExactlyInAnyOrder( Row.of("new york", "usa"), Row.of("chicago", "usa"), Row.of("berlin", "germany"), Row.of("hamburg", "germany")); tEnv.executeSql( "INSERT INTO paimon.`default`.cities VALUES " + "('usa', 'houston'), " + "('germany', 'munich')") .await(); assertThat( collect( tEnv.executeSql( "SELECT name FROM iceberg.`default`.cities WHERE country = 'germany'"))) .containsExactlyInAnyOrder(Row.of("berlin"), Row.of("hamburg"), Row.of("munich")); } @ParameterizedTest @ValueSource(strings = {"orc", "parquet", "avro"}) public void testFilterAllTypes(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.T (\n" + " pt INT,\n" + " id INT," + " v_int INT,\n" + " v_tinyint TINYINT,\n" 
+ " v_smallint SMALLINT,\n" + " v_boolean BOOLEAN,\n" + " v_bigint BIGINT,\n" + " v_float FLOAT,\n" + " v_double DOUBLE,\n" + " v_decimal DECIMAL(8, 3),\n" + " v_varchar STRING,\n" + " v_varbinary VARBINARY(20),\n" + " v_date DATE,\n" // it seems that Iceberg Flink connector has some bug when filtering a // timestamp_ltz, so we don't test it here + " v_timestamp TIMESTAMP(6)\n" + ") PARTITIONED BY (pt) WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 1, 1, CAST(1 AS TINYINT), CAST(1 as SMALLINT), true, 10, CAST(100.0 AS FLOAT), 1000.0, 123.456, 'cat', CAST('B_cat' AS VARBINARY(20)), DATE '2024-10-10', TIMESTAMP '2024-10-10 11:22:33.123456'), " + "(2, 2, 2, CAST(2 AS TINYINT), CAST(2 as SMALLINT), false, 20, CAST(200.0 AS FLOAT), 2000.0, 234.567, 'dog', CAST('B_dog' AS VARBINARY(20)), DATE '2024-10-20', TIMESTAMP '2024-10-20 11:22:33.123456'), " + "(3, 3, CAST(NULL AS INT), CAST(NULL AS TINYINT), CAST(NULL AS SMALLINT), CAST(NULL AS BOOLEAN), CAST(NULL AS BIGINT), CAST(NULL AS FLOAT), CAST(NULL AS DOUBLE), CAST(NULL AS DECIMAL(8, 3)), CAST(NULL AS STRING), CAST(NULL AS VARBINARY(20)), CAST(NULL AS DATE), CAST(NULL AS TIMESTAMP(6)))") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); tEnv.executeSql("USE CATALOG iceberg"); assertThat(collect(tEnv.executeSql("SELECT id FROM T where pt = 1"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_int = 1"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_tinyint = 1"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_smallint = 1"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T 
where v_boolean = true"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_bigint = 10"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_float = 100.0"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_double = 1000.0"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_decimal = 123.456"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_varchar = 'cat'"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_date = '2024-10-10'"))) .containsExactly(Row.of(1)); assertThat( collect( tEnv.executeSql( "SELECT id FROM T where v_timestamp = TIMESTAMP '2024-10-10 11:22:33.123456'"))) .containsExactly(Row.of(1)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_int IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_tinyint IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_smallint IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_boolean IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_bigint IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_float IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_double IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_decimal IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_varchar IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_varbinary IS NULL"))) .containsExactly(Row.of(3)); 
assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_date IS NULL"))) .containsExactly(Row.of(3)); assertThat(collect(tEnv.executeSql("SELECT id FROM T where v_timestamp IS NULL"))) .containsExactly(Row.of(3)); } @ParameterizedTest // orc writer does not write timestamp_ltz correctly, however we won't fix it due to // compatibility concern, so we don't test orc here @ValueSource(strings = {"parquet"}) public void testFilterTimestampLtz(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.T (\n" + " id INT," + " v_timestampltz TIMESTAMP_LTZ(6)\n" + ") WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, CAST(TO_TIMESTAMP_LTZ(1100000000321, 3) AS TIMESTAMP_LTZ(6))), " + "(2, CAST(TO_TIMESTAMP_LTZ(1200000000321, 3) AS TIMESTAMP_LTZ(6))), " + "(3, CAST(NULL AS TIMESTAMP_LTZ(6)))") .await(); HadoopCatalog icebergCatalog = new HadoopCatalog(new Configuration(), warehouse + "/iceberg"); TableIdentifier icebergIdentifier = TableIdentifier.of("default", "T"); org.apache.iceberg.Table icebergTable = icebergCatalog.loadTable(icebergIdentifier); CloseableIterable<Record> result = IcebergGenerics.read(icebergTable) .where(Expressions.equal("v_timestampltz", 1100000000321000L)) .build(); List<Object> actual = new ArrayList<>(); for (Record record : result) { actual.add(record.get(0)); } result.close(); assertThat(actual).containsExactly(1); result = IcebergGenerics.read(icebergTable) .where(Expressions.isNull("v_timestampltz")) .build(); actual = new ArrayList<>(); for (Record record : result) { actual.add(record.get(0)); } result.close(); assertThat(actual).containsExactly(3); 
} @ParameterizedTest @ValueSource(strings = {"orc", "parquet", "avro"}) public void testDropAndRecreateTable(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); String createTableDdl = "CREATE TABLE paimon.`default`.cities (\n" + " country STRING,\n" + " name STRING\n" + ") WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" + " 'file.format' = '" + format + "'\n" + ")"; tEnv.executeSql(createTableDdl); tEnv.executeSql( "INSERT INTO paimon.`default`.cities VALUES " + "('usa', 'new york'), " + "('germany', 'berlin')") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); assertThat(collect(tEnv.executeSql("SELECT name, country FROM iceberg.`default`.cities"))) .containsExactlyInAnyOrder(Row.of("new york", "usa"), Row.of("berlin", "germany")); tEnv.executeSql( "INSERT INTO paimon.`default`.cities VALUES " + "('usa', 'chicago'), " + "('germany', 'hamburg')") .await(); assertThat(collect(tEnv.executeSql("SELECT name, country FROM iceberg.`default`.cities"))) .containsExactlyInAnyOrder( Row.of("new york", "usa"), Row.of("chicago", "usa"), Row.of("berlin", "germany"), Row.of("hamburg", "germany")); tEnv.executeSql("DROP TABLE paimon.`default`.cities"); tEnv.executeSql(createTableDdl); tEnv.executeSql( "INSERT INTO paimon.`default`.cities VALUES " + "('usa', 'houston'), " + "('germany', 'munich')") .await(); assertThat(collect(tEnv.executeSql("SELECT name, country FROM iceberg.`default`.cities"))) .containsExactlyInAnyOrder(Row.of("houston", "usa"), Row.of("munich", "germany")); tEnv.executeSql( "INSERT INTO paimon.`default`.cities VALUES " + "('usa', 'san francisco'), " + 
"('germany', 'cologne')") .await(); assertThat( collect( tEnv.executeSql( "SELECT name FROM iceberg.`default`.cities WHERE country = 'germany'"))) .containsExactlyInAnyOrder(Row.of("munich"), Row.of("cologne")); } @ParameterizedTest @ValueSource(strings = {"orc", "parquet", "avro"}) public void testNestedTypes(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.T (\n" + " k INT,\n" + " v MAP<INT, ARRAY<ROW(f1 STRING, f2 INT)>>,\n" + " v2 BIGINT\n" + ") WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, MAP[10, ARRAY[ROW('apple', 100), ROW('banana', 101)], 20, ARRAY[ROW('cat', 102), ROW('dog', 103)]], 1000), " + "(2, MAP[10, ARRAY[ROW('cherry', 200), ROW('pear', 201)], 20, ARRAY[ROW('tiger', 202), ROW('wolf', 203)]], 2000)") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); assertThat(collect(tEnv.executeSql("SELECT k, v[10], v2 FROM iceberg.`default`.T"))) .containsExactlyInAnyOrder( Row.of(1, new Row[] {Row.of("apple", 100), Row.of("banana", 101)}, 1000L), Row.of(2, new Row[] {Row.of("cherry", 200), Row.of("pear", 201)}, 2000L)); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(3, MAP[10, ARRAY[ROW('mango', 300), ROW('watermelon', 301)], 20, ARRAY[ROW('rabbit', 302), ROW('lion', 303)]], 3000)") .await(); assertThat( collect( tEnv.executeSql( "SELECT k, v[10][2].f1, v2 FROM iceberg.`default`.T WHERE v[20][1].f2 > 200"))) .containsExactlyInAnyOrder( Row.of(2, "pear", 2000L), Row.of(3, 
"watermelon", 3000L)); } @ParameterizedTest @ValueSource(strings = {"orc", "parquet", "avro"}) public void testCreateTags(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.T (\n" + " pt INT,\n" + " k INT,\n" + " v1 INT,\n" + " v2 STRING,\n" + " PRIMARY KEY (pt, k) NOT ENFORCED\n" + ") PARTITIONED BY (pt) WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" // make sure all changes are visible in iceberg metadata + " 'full-compaction.delta-commits' = '1',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 100, 'apple'), " + "(1, 11, 110, 'banana'), " + "(2, 20, 200, 'cat'), " + "(2, 21, 210, 'dog')") .await(); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 101, 'red'), " + "(1, 12, 121, 'green'), " + "(2, 20, 201, 'blue'), " + "(2, 22, 221, 'yellow')") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); tEnv.executeSql("CALL paimon.sys.create_tag('default.T', 'tag1', 1)"); assertThat( collect( tEnv.executeSql( "SELECT v1, k, v2, pt FROM iceberg.`default`.T /*+ OPTIONS('tag'='tag1') */ ORDER BY pt, k"))) .containsExactly( Row.of(100, 10, "apple", 1), Row.of(110, 11, "banana", 1), Row.of(200, 20, "cat", 2), Row.of(210, 21, "dog", 2)); // Snapshot ID 4 due to full-compaction.delta-commits=1 tEnv.executeSql("CALL paimon.sys.create_tag('default.T', 'tag2', 4)"); assertThat( collect( tEnv.executeSql( "SELECT v1, k, v2, pt FROM iceberg.`default`.T /*+ OPTIONS('tag'='tag2') */ ORDER BY pt, k"))) .containsExactly( Row.of(101, 10, "red", 1), 
Row.of(110, 11, "banana", 1), Row.of(121, 12, "green", 1), Row.of(201, 20, "blue", 2), Row.of(210, 21, "dog", 2), Row.of(221, 22, "yellow", 2)); assertThat( collect( tEnv.executeSql( "SELECT name, type, snapshot_id FROM iceberg.`default`.T$refs"))) .containsExactlyInAnyOrder(Row.of("tag1", "TAG", 1L), Row.of("tag2", "TAG", 4L)); } @ParameterizedTest @ValueSource(strings = {"orc", "parquet", "avro"}) public void testDeleteTags(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.T (\n" + " pt INT,\n" + " k INT,\n" + " v1 INT,\n" + " v2 STRING,\n" + " PRIMARY KEY (pt, k) NOT ENFORCED\n" + ") PARTITIONED BY (pt) WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" // make sure all changes are visible in iceberg metadata + " 'full-compaction.delta-commits' = '1',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 100, 'apple'), " + "(1, 11, 110, 'banana'), " + "(2, 20, 200, 'cat'), " + "(2, 21, 210, 'dog')") .await(); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 101, 'red'), " + "(1, 12, 121, 'green'), " + "(2, 20, 201, 'blue'), " + "(2, 22, 221, 'yellow')") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); tEnv.executeSql("CALL paimon.sys.create_tag('default.T', 'tag1', 1)"); // Snapshot ID 4 due to full-compaction.delta-commits=1 tEnv.executeSql("CALL paimon.sys.create_tag('default.T', 'tag2', 4)"); // Delete tag tEnv.executeSql("CALL paimon.sys.delete_tag('default.T', 'tag2')"); assertThat( collect( tEnv.executeSql( "SELECT v1, k, 
v2, pt FROM iceberg.`default`.T /*+ OPTIONS('tag'='tag1') */ ORDER BY pt, k"))) .containsExactly( Row.of(100, 10, "apple", 1), Row.of(110, 11, "banana", 1), Row.of(200, 20, "cat", 2), Row.of(210, 21, "dog", 2)); assertThat( collect( tEnv.executeSql( "SELECT name, type, snapshot_id FROM iceberg.`default`.T$refs"))) .containsExactlyInAnyOrder(Row.of("tag1", "TAG", 1L)); } @ParameterizedTest @ValueSource(strings = {"orc", "parquet", "avro"}) public void testReplaceTags(String format) throws Exception { String warehouse = getTempDirPath(); TableEnvironment tEnv = tableEnvironmentBuilder().batchMode().parallelism(2).build(); tEnv.executeSql( "CREATE CATALOG paimon WITH (\n" + " 'type' = 'paimon',\n" + " 'warehouse' = '" + warehouse + "'\n" + ")"); tEnv.executeSql( "CREATE TABLE paimon.`default`.T (\n" + " pt INT,\n" + " k INT,\n" + " v1 INT,\n" + " v2 STRING,\n" + " PRIMARY KEY (pt, k) NOT ENFORCED\n" + ") PARTITIONED BY (pt) WITH (\n" + " 'metadata.iceberg.storage' = 'hadoop-catalog',\n" // make sure all changes are visible in iceberg metadata + " 'full-compaction.delta-commits' = '1',\n" + " 'file.format' = '" + format + "'\n" + ")"); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 100, 'apple'), " + "(1, 11, 110, 'banana'), " + "(2, 20, 200, 'cat'), " + "(2, 21, 210, 'dog')") .await(); tEnv.executeSql( "INSERT INTO paimon.`default`.T VALUES " + "(1, 10, 101, 'red'), " + "(1, 12, 121, 'green'), " + "(2, 20, 201, 'blue'), " + "(2, 22, 221, 'yellow')") .await(); tEnv.executeSql( "CREATE CATALOG iceberg WITH (\n" + " 'type' = 'iceberg',\n" + " 'catalog-type' = 'hadoop',\n" + " 'warehouse' = '" + warehouse + "/iceberg',\n" + " 'cache-enabled' = 'false'\n" + ")"); tEnv.executeSql("CALL paimon.sys.create_tag('default.T', 'tag1', 1)"); // Replace tag tEnv.executeSql("CALL paimon.sys.replace_tag('default.T', 'tag1', 4, '1d')"); assertThat( collect( tEnv.executeSql( "SELECT name, type, snapshot_id FROM iceberg.`default`.T$refs"))) 
.containsExactlyInAnyOrder(Row.of("tag1", "TAG", 4L)); assertThat( collect( tEnv.executeSql( "SELECT v1, k, v2, pt FROM iceberg.`default`.T /*+ OPTIONS('tag'='tag1') */ ORDER BY pt, k"))) .containsExactly( Row.of(101, 10, "red", 1), Row.of(110, 11, "banana", 1), Row.of(121, 12, "green", 1), Row.of(201, 20, "blue", 2), Row.of(210, 21, "dog", 2), Row.of(221, 22, "yellow", 2)); } private List<Row> collect(TableResult result) throws Exception { List<Row> rows = new ArrayList<>(); try (CloseableIterator<Row> it = result.collect()) { while (it.hasNext()) { rows.add(it.next()); } } return rows; } }
apache/seatunnel-web
35,547
seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/JobMetricsServiceImpl.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.service.impl;

import org.apache.seatunnel.app.common.EngineType;
import org.apache.seatunnel.app.dal.dao.IJobInstanceDao;
import org.apache.seatunnel.app.dal.dao.IJobInstanceHistoryDao;
import org.apache.seatunnel.app.dal.dao.IJobMetricsDao;
import org.apache.seatunnel.app.dal.entity.JobInstance;
import org.apache.seatunnel.app.dal.entity.JobInstanceHistory;
import org.apache.seatunnel.app.dal.entity.JobMetrics;
import org.apache.seatunnel.app.dal.entity.JobMetricsHistory;
import org.apache.seatunnel.app.dal.mapper.JobMetricsHistoryMapper;
import org.apache.seatunnel.app.domain.response.engine.Engine;
import org.apache.seatunnel.app.domain.response.metrics.JobDAG;
import org.apache.seatunnel.app.domain.response.metrics.JobPipelineDetailMetricsRes;
import org.apache.seatunnel.app.domain.response.metrics.JobPipelineSummaryMetricsRes;
import org.apache.seatunnel.app.domain.response.metrics.JobSummaryMetricsRes;
import org.apache.seatunnel.app.permission.constants.SeatunnelFuncPermissionKeyConstant;
import org.apache.seatunnel.app.service.IJobMetricsService;
import org.apache.seatunnel.app.thirdparty.engine.SeaTunnelEngineProxy;
import org.apache.seatunnel.app.thirdparty.metrics.EngineMetricsExtractorFactory;
import org.apache.seatunnel.app.thirdparty.metrics.IEngineMetricsExtractor;
import org.apache.seatunnel.app.utils.JobUtils;
import org.apache.seatunnel.app.utils.ServletUtils;
import org.apache.seatunnel.common.constants.JobMode;
import org.apache.seatunnel.common.utils.JsonUtils;
import org.apache.seatunnel.engine.core.job.JobStatus;
import org.apache.seatunnel.server.common.CodeGenerateUtils;
import org.apache.seatunnel.server.common.Constants;
import org.apache.seatunnel.server.common.SeatunnelErrorEnum;
import org.apache.seatunnel.server.common.SeatunnelException;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;

import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;

import javax.annotation.Resource;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Job-metrics service: aggregates per-pipeline read/write counters and job status for the
 * web layer.
 *
 * <p>Metrics come from two places: the running engine (via {@link IEngineMetricsExtractor},
 * built per call from the instance's engine name/version) and the database (via
 * {@link IJobMetricsDao}). As a general pattern, running jobs are read live from the engine
 * and the DB copy is refreshed as a side effect; finished/canceled jobs are served from the
 * DB only. Several methods therefore mutate {@link JobInstance} / {@link JobMetrics} rows
 * while answering a read — callers should be aware of these write side effects.
 *
 * <p>NOTE(review): this class is not obviously thread-safe; concurrent calls for the same
 * job instance may race on the DB updates — confirm with the callers' usage.
 */
@Service
@Slf4j
public class JobMetricsServiceImpl extends SeatunnelBaseServiceImpl implements IJobMetricsService {

    // DAOs for metrics rows, job history (DAG snapshots) and job instances.
    @Resource private IJobMetricsDao jobMetricsDao;

    @Resource private IJobInstanceHistoryDao jobInstanceHistoryDao;

    @Resource private IJobInstanceDao jobInstanceDao;

    // Mapper used only by the two getJobMetricsHistory overloads.
    @Autowired private JobMetricsHistoryMapper jobMetricsHistoryMapper;

    /**
     * Returns one summary row (pipeline id, read/write counts, status) per pipeline of the
     * given job instance, after a permission check for the current user.
     */
    @Override
    public List<JobPipelineSummaryMetricsRes> getJobPipelineSummaryMetrics(
            @NonNull Long jobInstanceId) {
        int userId = ServletUtils.getCurrentUserId();
        funcPermissionCheck(SeatunnelFuncPermissionKeyConstant.JOB_METRICS_SUMMARY, userId);
        JobInstance jobInstance = jobInstanceDao.getJobInstance(jobInstanceId);
        List<JobMetrics> jobPipelineDetailMetrics = getJobPipelineDetailMetrics(jobInstance);
        return summaryMetrics(jobPipelineDetailMetrics);
    }

    /**
     * Returns a whole-job summary (total read/write row counts plus the live status fetched
     * from the engine) for one job instance.
     *
     * <p>Note: the engine-side status is always queried here, even though the detail metrics
     * may have come from the database (see {@link #getJobPipelineDetailMetrics}).
     *
     * @param jobEngineId engine-side job id; assumed to be numeric — it is parsed with
     *     {@code Long.parseLong} for the response (TODO confirm all engines use numeric ids)
     */
    @Override
    public JobSummaryMetricsRes getJobSummaryMetrics(
            @NonNull Long jobInstanceId, @NonNull String jobEngineId) {
        int userId = ServletUtils.getCurrentUserId();
        funcPermissionCheck(SeatunnelFuncPermissionKeyConstant.JOB_METRICS_SUMMARY, userId);
        JobInstance jobInstance = jobInstanceDao.getJobInstance(jobInstanceId);
        Engine engine = new Engine(jobInstance.getEngineName(), jobInstance.getEngineVersion());
        IEngineMetricsExtractor engineMetricsExtractor =
                (new EngineMetricsExtractorFactory(engine)).getEngineMetricsExtractor();

        JobStatus jobStatus = engineMetricsExtractor.getJobStatus(jobEngineId);

        List<JobMetrics> jobPipelineDetailMetrics = getJobPipelineDetailMetrics(jobInstance);
        // Sum the per-pipeline counters into job-level totals.
        long readCount =
                jobPipelineDetailMetrics.stream().mapToLong(JobMetrics::getReadRowCount).sum();
        long writeCount =
                jobPipelineDetailMetrics.stream().mapToLong(JobMetrics::getWriteRowCount).sum();
        return new JobSummaryMetricsRes(
                jobInstanceId, Long.parseLong(jobEngineId), readCount, writeCount, jobStatus);
    }

    /**
     * Batch variant of {@link #getJobSummaryMetrics}: builds a summary per job instance id.
     *
     * <p>The engine's full running-job metrics map is fetched once (using the engine
     * name/version of the FIRST instance — assumes all instances in the list share one
     * engine; TODO confirm) and then each instance is resolved against it, batch and
     * streaming jobs following different state-machine rules.
     *
     * @param jobInstanceIdAndJobEngineIdMap maps instance id -> engine-side job id
     * @return instance id -> summary; empty map when no instances were found; may be
     *     {@code null} if {@code jobMode} is neither BATCH nor STREAMING
     */
    @Override
    public Map<Long, JobSummaryMetricsRes> getALLJobSummaryMetrics(
            @NonNull Map<Long, Long> jobInstanceIdAndJobEngineIdMap,
            @NonNull List<Long> jobInstanceIdList,
            @NonNull JobMode jobMode) {
        log.info("jobInstanceIdAndJobEngineIdMap={}", jobInstanceIdAndJobEngineIdMap);
        int userId = ServletUtils.getCurrentUserId();
        funcPermissionCheck(SeatunnelFuncPermissionKeyConstant.JOB_METRICS_SUMMARY, userId);
        List<JobInstance> allJobInstance = jobInstanceDao.getAllJobInstance(jobInstanceIdList);
        if (allJobInstance.isEmpty()) {
            log.warn(
                    "getALLJobSummaryMetrics : allJobInstance is empty, task id list is {}",
                    jobInstanceIdList);
            return new HashMap<>();
        }
        Map<Long, JobSummaryMetricsRes> result = null;
        // One engine round-trip for all running jobs; keyed by engine-side job id.
        Map<Long, HashMap<Integer, JobMetrics>> allRunningJobMetricsFromEngine =
                getAllRunningJobMetricsFromEngine(
                        allJobInstance.get(0).getEngineName(),
                        allJobInstance.get(0).getEngineVersion());

        if (JobMode.BATCH == jobMode) {
            result =
                    getMatricsListIfTaskTypeIsBatch(
                            allJobInstance,
                            allRunningJobMetricsFromEngine,
                            jobInstanceIdAndJobEngineIdMap);
        } else if (JobMode.STREAMING == jobMode) {
            result =
                    getMatricsListIfTaskTypeIsStreaming(
                            allJobInstance,
                            allRunningJobMetricsFromEngine,
                            jobInstanceIdAndJobEngineIdMap);
        }
        log.info("result is {}", result == null ? "null" : result.toString());
        return result;
    }

    /**
     * Resolves summaries for BATCH jobs.
     *
     * <p>Rules encoded below: a job whose stored status is null/FAILED/RUNNING is first
     * looked up among the engine's running jobs — if present, live metrics are returned and
     * the DB copy refreshed; if absent, the DB row is returned and a stored RUNNING status
     * is flipped to FINISHED (batch job no longer on the engine is assumed done). Jobs
     * already FINISHED/CANCELED are served straight from the DB.
     */
    private Map<Long, JobSummaryMetricsRes> getMatricsListIfTaskTypeIsBatch(
            List<JobInstance> allJobInstance,
            Map<Long, HashMap<Integer, JobMetrics>> allRunningJobMetricsFromEngine,
            Map<Long, Long> jobInstanceIdAndJobEngineIdMap) {
        HashMap<Long, JobSummaryMetricsRes> jobSummaryMetricsResMap = new HashMap<>();

        log.info("allRunningJobMetricsFromEngine is {}", allRunningJobMetricsFromEngine.toString());
        // Traverse all jobInstances in allJobInstance
        for (JobInstance jobInstance : allJobInstance) {
            log.info("jobEngineId={}", jobInstance.getJobEngineId());
            if (jobInstance.getJobStatus() == null
                    || jobInstance.getJobStatus() == JobStatus.FAILED
                    || jobInstance.getJobStatus() == JobStatus.RUNNING) {
                // Obtain monitoring information from the collection of running jobs returned from
                // the engine
                if (!allRunningJobMetricsFromEngine.isEmpty()
                        && allRunningJobMetricsFromEngine.containsKey(
                                jobInstanceIdAndJobEngineIdMap.get(jobInstance.getId()))) {
                    JobSummaryMetricsRes jobMetricsFromEngineRes =
                            getRunningJobMetricsFromEngine(
                                    allRunningJobMetricsFromEngine,
                                    jobInstanceIdAndJobEngineIdMap,
                                    jobInstance);
                    jobSummaryMetricsResMap.put(jobInstance.getId(), jobMetricsFromEngineRes);
                    // Side effect: persist the live counters and RUNNING status to the DB.
                    modifyAndUpdateJobInstanceAndJobMetrics(
                            jobInstance,
                            allRunningJobMetricsFromEngine,
                            jobInstanceIdAndJobEngineIdMap);
                } else {
                    log.info(
                            "The job does not exist on the engine, it is directly returned from the database");
                    JobSummaryMetricsRes jobMetricsFromDb =
                            getJobSummaryMetricsResByDb(
                                    jobInstance,
                                    Long.toString(
                                            jobInstanceIdAndJobEngineIdMap.get(
                                                    jobInstance.getId())));
                    if (jobMetricsFromDb != null) {
                        jobSummaryMetricsResMap.put(jobInstance.getId(), jobMetricsFromDb);
                    }
                    if (jobInstance.getJobStatus() == JobStatus.RUNNING) {
                        // Set the job status of jobInstance and jobMetrics in the database to
                        // finished
                        jobInstance.setJobStatus(JobStatus.FINISHED);
                        jobInstanceDao.getJobInstanceMapper().updateById(jobInstance);
                    }
                }
            } else if (jobInstance.getJobStatus() == JobStatus.FINISHED
                    || jobInstance.getJobStatus() == JobStatus.CANCELED) {
                // If the status of the job is finished or cancelled, the monitoring information is
                // directly obtained from MySQL
                JobSummaryMetricsRes jobMetricsFromDb =
                        getJobSummaryMetricsResByDb(
                                jobInstance,
                                Long.toString(
                                        jobInstanceIdAndJobEngineIdMap.get(jobInstance.getId())));
                log.info("jobStatus=finish oe canceled,JobSummaryMetricsRes={}", jobMetricsFromDb);
                jobSummaryMetricsResMap.put(jobInstance.getId(), jobMetricsFromDb);
            }
        }
        return jobSummaryMetricsResMap;
    }

    /**
     * Marks the instance RUNNING and copies the engine's live per-pipeline counters into the
     * DB: inserts fresh rows when none exist yet, otherwise updates the existing rows'
     * read/write counts and status.
     *
     * <p>NOTE(review): the update path indexes {@code jobMetricsFromEngine} by the DB row's
     * pipeline id without a null check — assumes every DB pipeline id is present in the
     * engine map; an NPE is possible otherwise (confirm with engine behaviour).
     */
    private void modifyAndUpdateJobInstanceAndJobMetrics(
            JobInstance jobInstance,
            Map<Long, HashMap<Integer, JobMetrics>> allRunningJobMetricsFromEngine,
            Map<Long, Long> jobInstanceIdAndJobEngineIdMap) {
        jobInstance.setJobStatus(JobStatus.RUNNING);
        HashMap<Integer, JobMetrics> jobMetricsFromEngine =
                allRunningJobMetricsFromEngine.get(
                        jobInstanceIdAndJobEngineIdMap.get(jobInstance.getId()));
        List<JobMetrics> jobMetricsFromDb = jobMetricsDao.getByInstanceId(jobInstance.getId());
        log.info("001jobMetricsFromDb={}", jobMetricsFromDb);
        if (jobMetricsFromDb.isEmpty()) {
            log.info("002jobMetricsFromDb == null");
            // First sighting of this job: insert the engine rows, then persist the instance.
            syncMetricsToDbRunning(jobInstance, jobMetricsFromEngine);
            jobInstanceDao.update(jobInstance);
        } else {
            // Existing rows: overwrite counters with the engine's values, mark RUNNING.
            jobMetricsFromDb.forEach(
                    jobMetrics ->
                            jobMetrics.setReadRowCount(
                                    jobMetricsFromEngine
                                            .get(jobMetrics.getPipelineId())
                                            .getReadRowCount()));
            jobMetricsFromDb.forEach(
                    jobMetrics ->
                            jobMetrics.setWriteRowCount(
                                    jobMetricsFromEngine
                                            .get(jobMetrics.getPipelineId())
                                            .getWriteRowCount()));
            jobMetricsFromDb.forEach(jobMetrics -> jobMetrics.setStatus(JobStatus.RUNNING));
            updateJobInstanceAndMetrics(jobInstance, jobMetricsFromDb);
        }
    }

    /**
     * Resolves summaries for STREAMING jobs.
     *
     * <p>Rules: CANCELED jobs come from the DB. FINISHED/FAILED jobs are re-checked against
     * the engine (a streaming job may have been restarted) — live data wins when present.
     * Any other status prefers live engine data; when the job is not on the engine, the
     * current status is re-fetched from the engine by id and, if obtained, the DB instance
     * and metrics rows are updated to that status before answering from the DB.
     *
     * <p>Any unexpected failure for one instance aborts the whole batch as a
     * RuntimeException (original cause preserved).
     */
    private Map<Long, JobSummaryMetricsRes> getMatricsListIfTaskTypeIsStreaming(
            List<JobInstance> allJobInstance,
            Map<Long, HashMap<Integer, JobMetrics>> allRunningJobMetricsFromEngine,
            Map<Long, Long> jobInstanceIdAndJobEngineIdMap) {
        HashMap<Long, JobSummaryMetricsRes> jobSummaryMetricsResMap = new HashMap<>();

        // Traverse all jobInstances in allJobInstance
        for (JobInstance jobInstance : allJobInstance) {
            try {
                if (jobInstance.getJobStatus() != null
                        && jobInstance.getJobStatus() == JobStatus.CANCELED) {
                    // If the status of the job is finished or cancelled
                    // the monitoring information is directly obtained from MySQL
                    JobSummaryMetricsRes jobMetricsFromDb =
                            getJobSummaryMetricsResByDb(
                                    jobInstance,
                                    Long.toString(
                                            jobInstanceIdAndJobEngineIdMap.get(
                                                    jobInstance.getId())));
                    jobSummaryMetricsResMap.put(jobInstance.getId(), jobMetricsFromDb);
                } else if (jobInstance.getJobStatus() != null
                        && (jobInstance.getJobStatus() == JobStatus.FINISHED
                                || jobInstance.getJobStatus() == JobStatus.FAILED)) {
                    // Obtain monitoring information from the collection of running jobs returned
                    // from
                    // the engine
                    if (!allRunningJobMetricsFromEngine.isEmpty()
                            && allRunningJobMetricsFromEngine.containsKey(
                                    jobInstanceIdAndJobEngineIdMap.get(jobInstance.getId()))) {
                        // If it can be found, update the information in MySQL and return it to the
                        // front-end data
                        modifyAndUpdateJobInstanceAndJobMetrics(
                                jobInstance,
                                allRunningJobMetricsFromEngine,
                                jobInstanceIdAndJobEngineIdMap);
                        // Return data from the front-end
                        JobSummaryMetricsRes jobMetricsFromEngineRes =
                                getRunningJobMetricsFromEngine(
                                        allRunningJobMetricsFromEngine,
                                        jobInstanceIdAndJobEngineIdMap,
                                        jobInstance);
                        jobSummaryMetricsResMap.put(jobInstance.getId(), jobMetricsFromEngineRes);
                    } else {
                        // If not found, obtain information from MySQL
                        JobSummaryMetricsRes jobMetricsFromDb =
                                getJobSummaryMetricsResByDb(
                                        jobInstance,
                                        Long.toString(
                                                jobInstanceIdAndJobEngineIdMap.get(
                                                        jobInstance.getId())));
                        jobSummaryMetricsResMap.put(jobInstance.getId(), jobMetricsFromDb);
                    }
                } else {
                    // Obtain monitoring information from the collection of running jobs returned
                    // from
                    // the engine
                    if (!allRunningJobMetricsFromEngine.isEmpty()
                            && allRunningJobMetricsFromEngine.containsKey(
                                    jobInstanceIdAndJobEngineIdMap.get(jobInstance.getId()))) {
                        modifyAndUpdateJobInstanceAndJobMetrics(
                                jobInstance,
                                allRunningJobMetricsFromEngine,
                                jobInstanceIdAndJobEngineIdMap);
                        // Return data from the front-end
                        JobSummaryMetricsRes jobMetricsFromEngineRes =
                                getRunningJobMetricsFromEngine(
                                        allRunningJobMetricsFromEngine,
                                        jobInstanceIdAndJobEngineIdMap,
                                        jobInstance);
                        jobSummaryMetricsResMap.put(jobInstance.getId(), jobMetricsFromEngineRes);
                    } else {
                        // Not running on the engine: ask the engine for the job's final status.
                        JobStatus jobStatus = null;
                        try {
                            jobStatus =
                                    getJobStatusByJobEngineId(
                                            String.valueOf(
                                                    jobInstanceIdAndJobEngineIdMap.get(
                                                            jobInstance.getId())));
                        } catch (Exception e) {
                            // Best-effort: a failed status lookup just skips the DB refresh below.
                            log.warn(
                                    "getMetricsListIfTaskTypeIsStreaming getJobStatusByJobEngineId is exception jobInstanceId is : {}",
                                    jobInstance.getId());
                        }
                        if (jobStatus != null) {
                            // Persist the freshly observed status on the instance...
                            jobInstance.setJobStatus(jobStatus);
                            jobInstanceDao.update(jobInstance);
                            JobSummaryMetricsRes jobSummaryMetricsResByDb =
                                    getJobSummaryMetricsResByDb(
                                            jobInstance,
                                            String.valueOf(
                                                    jobInstanceIdAndJobEngineIdMap.get(
                                                            jobInstance.getId())));
                            jobSummaryMetricsResMap.put(
                                    jobInstance.getId(), jobSummaryMetricsResByDb);
                            // ...and on every stored metrics row.
                            List<JobMetrics> jobMetricsFromDb =
                                    getJobMetricsFromDb(
                                            jobInstance,
                                            String.valueOf(
                                                    jobInstanceIdAndJobEngineIdMap.get(
                                                            jobInstance.getId())));
                            if (!jobMetricsFromDb.isEmpty()) {
                                JobStatus finalJobStatusByJobEngineId = jobStatus;
                                jobMetricsFromDb.forEach(
                                        jobMetrics ->
                                                jobMetrics.setStatus(finalJobStatusByJobEngineId));
                                for (JobMetrics jobMetrics : jobMetricsFromDb) {
                                    jobMetricsDao.getJobMetricsMapper().updateById(jobMetrics);
                                }
                            }
                        }
                    }
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        return jobSummaryMetricsResMap;
    }

    /**
     * Builds a RUNNING summary from the engine's live per-pipeline map for one instance.
     * Caller must have checked the instance is present in the map.
     *
     * <p>NOTE(review): the engine-id field of the response is hard-coded to {@code 1L} here,
     * unlike the DB path which uses the real engine id — looks intentional for running jobs
     * but worth confirming with the consumers.
     */
    private JobSummaryMetricsRes getRunningJobMetricsFromEngine(
            Map<Long, HashMap<Integer, JobMetrics>> allRunningJobMetricsFromEngine,
            Map<Long, Long> jobInstanceIdAndJobEngineIdMap,
            JobInstance jobInstance) {
        // If there is job information in the engine
        HashMap<Integer, JobMetrics> jobMetricsFromEngine =
                allRunningJobMetricsFromEngine.get(
                        jobInstanceIdAndJobEngineIdMap.get(jobInstance.getId()));
        log.info("0706jobMetricsFromEngine={}", jobMetricsFromEngine);
        long readCount =
                jobMetricsFromEngine.values().stream().mapToLong(JobMetrics::getReadRowCount).sum();
        long writeCount =
                jobMetricsFromEngine.values().stream()
                        .mapToLong(JobMetrics::getWriteRowCount)
                        .sum();
        log.info("jobInstance={}", jobInstance);
        return new JobSummaryMetricsRes(
                jobInstance.getId(), 1L, readCount, writeCount, JobStatus.RUNNING);
    }

    /**
     * Builds a summary from the DB metrics rows for one instance, or returns {@code null}
     * when no rows exist. Uses the instance's own stored engine id and status in the
     * response (the {@code jobEngineId} argument is only used to backfill the relation, see
     * {@link #getJobMetricsFromDb}).
     */
    private JobSummaryMetricsRes getJobSummaryMetricsResByDb(
            JobInstance jobInstance, String jobEngineId) {
        List<JobMetrics> jobMetricsFromDb = getJobMetricsFromDb(jobInstance, jobEngineId);
        if (!jobMetricsFromDb.isEmpty()) {
            long readCount =
                    jobMetricsFromDb.stream().mapToLong(JobMetrics::getReadRowCount).sum();
            long writeCount =
                    jobMetricsFromDb.stream().mapToLong(JobMetrics::getWriteRowCount).sum();

            return new JobSummaryMetricsRes(
                    jobInstance.getId(),
                    Long.parseLong(jobInstance.getJobEngineId()),
                    readCount,
                    writeCount,
                    jobInstance.getJobStatus());
        }
        return null;
    }

    /** Fetches live metrics for ALL running jobs from the engine, keyed by engine job id. */
    private Map<Long, HashMap<Integer, JobMetrics>> getAllRunningJobMetricsFromEngine(
            EngineType engineName, String engineVersion) {
        Engine engine = new Engine(engineName, engineVersion);
        IEngineMetricsExtractor engineMetricsExtractor =
                (new EngineMetricsExtractorFactory(engine)).getEngineMetricsExtractor();
        return engineMetricsExtractor.getAllRunningJobMetrics();
    }

    /** Persists the instance and each metrics row; no-op when either argument is null. */
    private void updateJobInstanceAndMetrics(JobInstance jobInstance, List<JobMetrics> jobMetrics) {
        if (jobInstance != null && jobMetrics != null) {
            jobInstanceDao.update(jobInstance);
            // jobMetricsFromDb
            for (JobMetrics jobMetric : jobMetrics) {
                jobMetricsDao.getJobMetricsMapper().updateById(jobMetric);
            }
        }
    }

    /** Asks the engine proxy for the current status of an engine-side job id. */
    private JobStatus getJobStatusByJobEngineId(String jobEngineId) {
        return SeaTunnelEngineProxy.getInstance().getJobStatus(jobEngineId);
    }

    /** Fetches live per-pipeline metrics for one job from the engine, keyed by pipeline id. */
    private Map<Integer, JobMetrics> getJobMetricsFromEngineMap(
            @NonNull JobInstance jobInstance, @NonNull String jobEngineId) {
        log.info("enter getJobMetricsFromEngine");
        Engine engine = new Engine(jobInstance.getEngineName(), jobInstance.getEngineVersion());
        IEngineMetricsExtractor engineMetricsExtractor =
                (new EngineMetricsExtractorFactory(engine)).getEngineMetricsExtractor();
        return engineMetricsExtractor.getMetricsByJobEngineIdRTMap(jobEngineId);
    }

    /**
     * Detail metrics for one instance: ended jobs read from the DB (falling back to the
     * engine, and persisting the result, when the DB has nothing yet); live jobs always read
     * from the engine.
     */
    private List<JobMetrics> getJobPipelineDetailMetrics(@NonNull JobInstance jobInstance) {
        List<JobMetrics> jobMetrics;
        if (JobUtils.isJobEndStatus(jobInstance.getJobStatus())) {
            jobMetrics = getJobMetricsFromDb(jobInstance, jobInstance.getJobEngineId());
            if (CollectionUtils.isEmpty(jobMetrics)) {
                jobMetrics = getJobMetricsFromEngine(jobInstance, jobInstance.getJobEngineId());
                if (!jobMetrics.isEmpty()) {
                    // If engine returns some metrics then it makes sense to insert into database
                    syncMetricsToDb(jobInstance, jobInstance.getJobEngineId());
                }
            }
        } else {
            // If job is not end state, get metrics from engine.
            jobMetrics = getJobMetricsFromEngine(jobInstance, jobInstance.getJobEngineId());
        }
        return jobMetrics;
    }

    /**
     * Per-pipeline detail rows for a job instance looked up by id, after a permission check.
     */
    @Override
    public List<JobPipelineDetailMetricsRes> getJobPipelineDetailMetricsRes(
            @NonNull Long jobInstanceId) {
        int userId = ServletUtils.getCurrentUserId();
        funcPermissionCheck(SeatunnelFuncPermissionKeyConstant.JOB_DETAIL, userId);

        JobInstance jobInstance = jobInstanceDao.getJobInstance(jobInstanceId);
        List<JobMetrics> jobPipelineDetailMetrics = getJobPipelineDetailMetrics(jobInstance);
        return jobPipelineDetailMetrics.stream()
                .map(this::wrapperJobMetrics)
                .collect(Collectors.toList());
    }

    /**
     * Per-pipeline detail rows for an already-loaded instance; returns an empty list for
     * ended jobs (live-only view, no permission check — assumed done by the caller).
     */
    @Override
    public List<JobPipelineDetailMetricsRes> getJobPipelineDetailMetricsRes(
            @NonNull JobInstance jobInstance) {
        if (JobUtils.isJobEndStatus(jobInstance.getJobStatus())) {
            return new ArrayList<>();
        }
        List<JobMetrics> jobPipelineDetailMetrics =
                getJobMetricsFromEngine(jobInstance, jobInstance.getJobEngineId());
        return jobPipelineDetailMetrics.stream()
                .map(this::wrapperJobMetrics)
                .collect(Collectors.toList());
    }

    /**
     * Returns the job's DAG, preferring the stored history snapshot; otherwise reads it from
     * the engine (persisting it first when the engine reports the job has ended). Returns
     * {@code null} when no DAG can be obtained.
     */
    @Override
    public JobDAG getJobDAG(@NonNull Long jobInstanceId) {
        int userId = ServletUtils.getCurrentUserId();
        funcPermissionCheck(SeatunnelFuncPermissionKeyConstant.JOB_DAG, userId);

        JobInstance jobInstance = jobInstanceDao.getJobInstance(jobInstanceId);
        String jobEngineId = jobInstance.getJobEngineId();
        JobInstanceHistory history = getJobHistoryFromDb(jobInstance, jobEngineId);
        if (history != null) {
            String dag = history.getDag();
            return JsonUtils.parseObject(dag, JobDAG.class);
        }

        Engine engine = new Engine(jobInstance.getEngineName(), jobInstance.getEngineVersion());
        IEngineMetricsExtractor engineMetricsExtractor =
                (new EngineMetricsExtractorFactory(engine)).getEngineMetricsExtractor();
        if (engineMetricsExtractor.isJobEnd(jobEngineId)) {
            // Job is finished on the engine: snapshot its history to the DB, then re-read.
            syncHistoryJobInfoToDb(jobInstance, jobEngineId);
            history = getJobHistoryFromDb(jobInstance, jobEngineId);
        } else {
            history = getJobHistoryFromEngine(jobInstance, jobEngineId);
        }
        if (history != null) {
            String dag = history.getDag();
            return JsonUtils.parseObject(dag, JobDAG.class);
        }
        return null;
    }

    /** Reads the job-history record (incl. DAG) for one job straight from the engine. */
    private JobInstanceHistory getJobHistoryFromEngine(
            @NonNull JobInstance jobInstance, String jobEngineId) {
        Engine engine = new Engine(jobInstance.getEngineName(), jobInstance.getEngineVersion());
        IEngineMetricsExtractor engineMetricsExtractor =
                (new EngineMetricsExtractorFactory(engine)).getEngineMetricsExtractor();
        return engineMetricsExtractor.getJobHistoryById(jobEngineId);
    }

    /**
     * Reads the stored job-history record; also backfills the instance's engine id first
     * (side effect of {@link #relationJobInstanceAndJobEngineId}).
     */
    private JobInstanceHistory getJobHistoryFromDb(
            @NonNull JobInstance jobInstance, String jobEngineId) {
        // relation jobInstanceId and jobEngineId
        relationJobInstanceAndJobEngineId(jobInstance, jobEngineId);
        return jobInstanceHistoryDao.getByInstanceId(jobInstance.getId());
    }

    /**
     * Full end-of-job persistence: links instance to engine id, then stores metrics, history
     * (DAG) and the instance's completion time, in that order.
     */
    @Override
    public void syncJobDataToDb(@NonNull JobInstance jobInstance, @NonNull String jobEngineId) {
        relationJobInstanceAndJobEngineId(jobInstance, jobEngineId);
        syncMetricsToDb(jobInstance, jobEngineId);
        syncHistoryJobInfoToDb(jobInstance, jobEngineId);
        syncCompleteJobInfoToDb(jobInstance);
    }

    /**
     * Copies the engine's current per-pipeline metrics into the DB: inserts new rows (with
     * generated ids and audit columns) when the instance has none, otherwise updates each
     * existing row's QPS/row counters and the job status fetched from the engine.
     *
     * <p>NOTE(review): the update branch assumes every stored pipeline id exists in the
     * engine map ({@code currentPiplinejobMetricsFromEngine} is not null-checked) — confirm.
     */
    private void syncMetricsToDb(@NonNull JobInstance jobInstance, @NonNull String jobEngineId) {
        Map<Integer, JobMetrics> jobMetricsFromEngineMap =
                getJobMetricsFromEngineMap(jobInstance, jobEngineId);
        int userId = ServletUtils.getCurrentUserId();

        List<JobMetrics> jobMetricsFromDb = getJobMetricsFromDb(jobInstance, jobEngineId);
        if (jobMetricsFromDb.isEmpty()) {
            List<JobMetrics> jobMetricsFromEngine =
                    Arrays.asList(jobMetricsFromEngineMap.values().toArray(new JobMetrics[0]));
            jobMetricsFromEngine.forEach(
                    metrics -> {
                        try {
                            metrics.setId(CodeGenerateUtils.getInstance().genCode());
                        } catch (CodeGenerateUtils.CodeGenerateException e) {
                            throw new SeatunnelException(
                                    SeatunnelErrorEnum.JOB_RUN_GENERATE_UUID_ERROR);
                        }
                        metrics.setJobInstanceId(jobInstance.getId());
                        metrics.setCreateUserId(userId);
                        metrics.setUpdateUserId(userId);
                        metrics.setWorkspaceId(jobInstance.getWorkspaceId());
                    });
            if (!jobMetricsFromEngine.isEmpty()) {
                jobMetricsDao.getJobMetricsMapper().insertBatchMetrics(jobMetricsFromEngine);
            }
        } else {
            JobStatus jobStatus = getJobStatusByJobEngineId(jobEngineId);
            for (JobMetrics jobMetrics : jobMetricsFromDb) {
                Integer pipelineId = jobMetrics.getPipelineId();
                JobMetrics currentPiplinejobMetricsFromEngine =
                        jobMetricsFromEngineMap.get(pipelineId);
                jobMetrics.setWriteQps(currentPiplinejobMetricsFromEngine.getWriteQps());
                jobMetrics.setReadQps(currentPiplinejobMetricsFromEngine.getReadQps());
                jobMetrics.setReadRowCount(currentPiplinejobMetricsFromEngine.getReadRowCount());
                jobMetrics.setWriteRowCount(currentPiplinejobMetricsFromEngine.getWriteRowCount());
                jobMetrics.setStatus(jobStatus);
                jobMetricsDao.getJobMetricsMapper().updateById(jobMetrics);
            }
        }
    }

    /**
     * Stores (or refreshes) the engine's job-history record for this instance, using the
     * instance id as the history id. A concurrent insert race is handled by falling back to
     * an update on duplicate key.
     */
    private void syncHistoryJobInfoToDb(
            @NonNull JobInstance jobInstance, @NonNull String jobEngineId) {
        JobInstanceHistory jobHistoryFromEngine = getJobHistoryFromEngine(jobInstance, jobEngineId);
        jobHistoryFromEngine.setId(jobInstance.getId());
        JobInstanceHistory byInstanceId =
                jobInstanceHistoryDao.getByInstanceId(jobInstance.getId());
        if (byInstanceId == null) {
            try {
                jobInstanceHistoryDao.insert(jobHistoryFromEngine);
            } catch (DuplicateKeyException e) {
                // Handle the race condition gracefully
                jobInstanceHistoryDao.updateJobInstanceHistory(jobHistoryFromEngine);
            }
        } else {
            jobInstanceHistoryDao.updateJobInstanceHistory(jobHistoryFromEngine);
        }
    }

    /** Stamps the instance's end time with "now" and persists it. */
    private void syncCompleteJobInfoToDb(@NonNull JobInstance jobInstance) {
        jobInstance.setEndTime(new Date());
        jobInstanceDao.update(jobInstance);
    }

    /**
     * Backfills the instance's engine id (and updating user) the first time it is seen;
     * no-op when the instance already carries an engine id.
     */
    private void relationJobInstanceAndJobEngineId(
            @NonNull JobInstance jobInstance, @NonNull String jobEngineId) {
        // relation jobInstanceId and jobEngineId
        if (StringUtils.isEmpty(jobInstance.getJobEngineId())) {
            int userId = ServletUtils.getCurrentUserId();
            jobInstance.setJobEngineId(jobEngineId);
            jobInstance.setUpdateUserId(userId);
            jobInstanceDao.update(jobInstance);
        }
    }

    /** Fetches the per-pipeline metrics list for one job from the engine. */
    private List<JobMetrics> getJobMetricsFromEngine(
            @NonNull JobInstance jobInstance, @NonNull String jobEngineId) {
        Engine engine = new Engine(jobInstance.getEngineName(), jobInstance.getEngineVersion());
        IEngineMetricsExtractor engineMetricsExtractor =
                (new EngineMetricsExtractorFactory(engine)).getEngineMetricsExtractor();
        return engineMetricsExtractor.getMetricsByJobEngineId(jobEngineId);
    }

    /** Projects detail metrics rows down to per-pipeline summary responses. */
    private List<JobPipelineSummaryMetricsRes> summaryMetrics(
            @NonNull List<JobMetrics> jobPipelineDetailedMetrics) {
        return jobPipelineDetailedMetrics.stream()
                .map(
                        metrics ->
                                new JobPipelineSummaryMetricsRes(
                                        metrics.getPipelineId(),
                                        metrics.getReadRowCount(),
                                        metrics.getWriteRowCount(),
                                        metrics.getStatus()))
                .collect(Collectors.toList());
    }

    /**
     * Reads the instance's metrics rows from the DB; first backfills the instance/engine-id
     * relation (write side effect on first call for an instance).
     */
    private List<JobMetrics> getJobMetricsFromDb(
            @NonNull JobInstance jobInstance, @NonNull String jobEngineId) {
        // relation jobInstanceId and jobEngineId
        relationJobInstanceAndJobEngineId(jobInstance, jobEngineId);
        // get metrics from db
        return jobMetricsDao.getByInstanceId(jobInstance.getId());
    }

    /**
     * Splits a metrics query key of the form {@code <instanceId><SPLIT><engineId>} into its
     * two parts.
     *
     * @throws SeatunnelException when the key does not contain exactly one split token
     */
    @Override
    public ImmutablePair<Long, String> getInstanceIdAndEngineId(@NonNull String key) {
        if (!key.contains(Constants.METRICS_QUERY_KEY_SPLIT)
                || key.split(Constants.METRICS_QUERY_KEY_SPLIT).length != 2) {
            throw new SeatunnelException(SeatunnelErrorEnum.JOB_METRICS_QUERY_KEY_ERROR, key);
        }
        String[] split = key.split(Constants.METRICS_QUERY_KEY_SPLIT);
        Long jobInstanceId = Long.valueOf(split[0]);
        String jobEngineId = split[1];
        return new ImmutablePair<>(jobInstanceId, jobEngineId);
    }

    /** Maps one {@link JobMetrics} entity to its detail response DTO, field by field. */
    private JobPipelineDetailMetricsRes wrapperJobMetrics(@NonNull JobMetrics metrics) {
        return new JobPipelineDetailMetricsRes(
                metrics.getId(),
                metrics.getPipelineId(),
                metrics.getReadRowCount(),
                metrics.getWriteRowCount(),
                metrics.getSourceTableNames(),
                metrics.getSinkTableNames(),
                metrics.getReadQps(),
                metrics.getWriteQps(),
                metrics.getRecordDelay(),
                metrics.getStatus());
    }

    /**
     * Batch-inserts the engine's per-pipeline metrics as brand-new DB rows for a running
     * job, filling in generated ids and audit columns. Note: unlike
     * {@link #syncMetricsToDb}, the workspace id is taken from the current request context
     * rather than from the instance.
     */
    private void syncMetricsToDbRunning(
            @NonNull JobInstance jobInstance, @NonNull Map<Integer, JobMetrics> jobMetricsMap) {
        int userId = ServletUtils.getCurrentUserId();
        ArrayList<JobMetrics> list = new ArrayList<>();
        for (Map.Entry<Integer, JobMetrics> entry : jobMetricsMap.entrySet()) {
            JobMetrics jobMetrics = entry.getValue();
            jobMetrics.setId(CodeGenerateUtils.getInstance().genCode());
            jobMetrics.setJobInstanceId(jobInstance.getId());
            jobMetrics.setCreateUserId(userId);
            jobMetrics.setUpdateUserId(userId);
            jobMetrics.setWorkspaceId(ServletUtils.getCurrentWorkspaceId());
            list.add(jobMetrics);
        }
        if (!list.isEmpty()) {
            log.info("003list={}", list);
            jobMetricsDao.getJobMetricsMapper().insertBatchMetrics(list);
        }
    }

    /** Returns the full stored metrics history for one job instance. */
    @Override
    @NonNull public List<JobMetricsHistory> getJobMetricsHistory(@NonNull Long jobInstanceId) {
        return jobMetricsHistoryMapper.queryJobMetricsHistoryByInstanceId(jobInstanceId);
    }

    /**
     * Time-bounded metrics history; falls back to the unbounded overload when either bound
     * is empty.
     */
    @Override
    public List<JobMetricsHistory> getJobMetricsHistory(
            Long jobInstanceId, String startTime, String endTime) {
        if (StringUtils.isNotEmpty(startTime) && StringUtils.isNotEmpty(endTime)) {
            return jobMetricsHistoryMapper.queryJobMetricsHistoryByInstanceIdAndTimeRange(
                    jobInstanceId, startTime, endTime);
        }
        return getJobMetricsHistory(jobInstanceId);
    }
}
google/binnavi
35,249
src/test/java/com/google/security/zynamics/reil/algorithms/mono2/registertracking/BackwardRegisterTrackingTransformationProviderTest.java
/* Copyright 2014 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.security.zynamics.reil.algorithms.mono2.registertracking; import com.google.security.zynamics.reil.OperandSize; import com.google.security.zynamics.reil.ReilHelpers; import com.google.security.zynamics.reil.ReilInstruction; import com.google.security.zynamics.reil.algorithms.mono2.common.enums.AnalysisDirection; import com.google.security.zynamics.zylib.general.Pair; import junit.framework.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.util.Set; import java.util.TreeSet; @RunWith(JUnit4.class) public class BackwardRegisterTrackingTransformationProviderTest { public static RegisterSetLatticeElement createTaintedState(final String register) { final Set<String> taintedRegs = new TreeSet<String>(); taintedRegs.add(register); final Set<String> newlyTaintedRegs = new TreeSet<String>(); final Set<String> untaintedRegs = new TreeSet<String>(); final Set<String> readRegs = new TreeSet<String>(); final Set<String> updatedRegs = new TreeSet<String>(); return new RegisterSetLatticeElement(taintedRegs, newlyTaintedRegs, untaintedRegs, readRegs, updatedRegs); } @Test public void testTransformAddEmptyState() { final RegisterTrackingTransformationProvider transformationProvider = new RegisterTrackingTransformationProvider(new RegisterTrackingOptions(false, new TreeSet<String>(), false, AnalysisDirection.UP)); final ReilInstruction 
instruction = ReilHelpers.createAdd(0, OperandSize.DWORD, "eax", OperandSize.DWORD, "ebx", OperandSize.DWORD, "ecx"); final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult = transformationProvider.transformAdd(instruction, new RegisterSetLatticeElement()); Assert.assertNull(transformationResult.second()); Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty()); } @Test public void testTransformAddOutputIsTainted() { final RegisterTrackingTransformationProvider transformationProvider = new RegisterTrackingTransformationProvider(new RegisterTrackingOptions(false, new TreeSet<String>(), false, AnalysisDirection.UP)); final ReilInstruction instruction = ReilHelpers.createAdd(0, OperandSize.DWORD, "ecx", OperandSize.DWORD, "ebx", OperandSize.DWORD, "eax"); final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult = transformationProvider.transformAdd(instruction, createTaintedState("eax")); Assert.assertNull(transformationResult.second()); Assert.assertFalse(transformationResult.first().isTainted("eax")); Assert.assertTrue(transformationResult.first().isTainted("ecx")); Assert.assertTrue(transformationResult.first().isTainted("ebx")); transformationResult.first().onInstructionExit(); Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax")); Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx")); Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx")); Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty()); } @Test public void testTransformAnd() { final RegisterTrackingTransformationProvider transformationProvider = new RegisterTrackingTransformationProvider(new RegisterTrackingOptions(false, new TreeSet<String>(), false, AnalysisDirection.UP)); final ReilInstruction instruction = ReilHelpers.createAnd(0, OperandSize.DWORD, "ecx", OperandSize.DWORD, "ebx", 
OperandSize.DWORD, "eax"); final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult = transformationProvider.transformAnd(instruction, createTaintedState("eax")); Assert.assertNull(transformationResult.second()); Assert.assertFalse(transformationResult.first().isTainted("eax")); Assert.assertTrue(transformationResult.first().isTainted("ecx")); Assert.assertTrue(transformationResult.first().isTainted("ebx")); transformationResult.first().onInstructionExit(); Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax")); Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx")); Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx")); Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty()); } @Test public void testTransformAndZeroFirstArgument() { final RegisterTrackingTransformationProvider transformationProvider = new RegisterTrackingTransformationProvider(new RegisterTrackingOptions(false, new TreeSet<String>(), false, AnalysisDirection.UP)); final ReilInstruction instruction = ReilHelpers.createAnd(0, OperandSize.DWORD, String.valueOf(0), OperandSize.DWORD, "ecx", OperandSize.DWORD, "eax"); final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult = transformationProvider.transformAnd(instruction, createTaintedState("eax")); Assert.assertNull(transformationResult.second()); Assert.assertFalse(transformationResult.first().isTainted("eax")); transformationResult.first().onInstructionExit(); Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty()); Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty()); Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty()); Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax")); } @Test public void testTransformAndZeroSecondArgument() { final 
RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createAnd(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, String.valueOf(0), OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformAnd(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    // AND with the constant 0 always yields 0, so the taint on the output must be killed.
    Assert.assertFalse(transformationResult.first().isTainted("eax"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /** bisz with a tainted output register: taint must propagate to the condition input "ecx". */
  @Test
  public void testTransformBisz() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction =
        ReilHelpers.createBisz(0, OperandSize.DWORD, "ecx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformBisz(instruction, createTaintedState("eax"));
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ecx"));
  }

  /** bsh with a tainted output "ecx": both shift inputs become tainted, the output is cleared. */
  @Test
  public void testTransformBsh() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createBsh(0, OperandSize.DWORD, "eax",
        OperandSize.DWORD, "ebx", OperandSize.DWORD, "ecx");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformBsh(instruction, createTaintedState("ecx"));
    Assert.assertFalse(transformationResult.first().getTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ebx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("ecx"));
  }

  /** div with a tainted output "ecx": taint flows back to both input operands. */
  @Test
  public void testTransformDiv() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createDiv(0, OperandSize.DWORD, "eax",
        OperandSize.DWORD, "ebx", OperandSize.DWORD, "ecx");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformDiv(instruction, createTaintedState("ecx"));
    Assert.assertFalse(transformationResult.first().getTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ebx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("ecx"));
  }

  /** A call jcc with the clear-all option set: every taint is dropped at the call. */
  @Test
  public void testTransformJccFunctionCallClearAll() {
    final RegisterTrackingTransformationProvider transformationProvider =
        new RegisterTrackingTransformationProvider(
            new RegisterTrackingOptions(true, new TreeSet<String>(), false, AnalysisDirection.UP));
    final ReilInstruction instruction = ReilHelpers.createJcc(0, OperandSize.DWORD, "eax",
        OperandSize.DWORD, "ecx", "isCall", "true");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformJcc(instruction, createTaintedState("ecx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** A call jcc with "ecx" in the cleared-registers set: the taint on "ecx" is dropped. */
  @Test
  public void testTransformJccFunctionCallClearSet() {
    final Set<String> cleared = new TreeSet<String>();
    cleared.add("ecx");
    final RegisterTrackingTransformationProvider transformationProvider =
        new RegisterTrackingTransformationProvider(
            new RegisterTrackingOptions(false, cleared, false, AnalysisDirection.UP));
    final ReilInstruction instruction = ReilHelpers.createJcc(0, OperandSize.DWORD, "eax",
        OperandSize.DWORD, "ecx", "isCall", "true");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformJcc(instruction, createTaintedState("ecx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** A non-call jcc: the cleared-registers set must NOT be applied, so "ecx" stays tainted. */
  @Test
  public void testTransformJccNoFunctionCallClear() {
    final Set<String> cleared = new TreeSet<String>();
    cleared.add("ecx");
    final RegisterTrackingTransformationProvider transformationProvider =
        new RegisterTrackingTransformationProvider(
            new RegisterTrackingOptions(false, cleared, false, AnalysisDirection.UP));
    final ReilInstruction instruction =
        ReilHelpers.createJcc(0, OperandSize.DWORD, "eax", OperandSize.DWORD, "ecx");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformJcc(instruction, createTaintedState("ecx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** jcc must not propagate taint from the state onto its condition register "eax". */
  @Test
  public void testTransformJccNoTaintconditionVariable() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction =
        ReilHelpers.createJcc(0, OperandSize.DWORD, "eax", OperandSize.DWORD, "ecx");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformJcc(instruction, createTaintedState("ecx"));
    Assert.assertFalse(transformationResult.first().getTaintedRegisters().contains("eax"));
  }

  /** ldm overwrites its tainted output "ecx" with a memory load: the taint is cleared. */
  @Test
  public void testTransformLdm() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction =
        ReilHelpers.createLdm(0, OperandSize.DWORD, "eax", OperandSize.DWORD, "ecx");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformLdm(instruction, createTaintedState("ecx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** mod with a tainted output "eax": taint flows back to inputs "ecx" and "ebx". */
  @Test
  public void testTransformMod() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createMod(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "ebx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformMod(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    Assert.assertFalse(transformationResult.first().isTainted("eax"));
    Assert.assertTrue(transformationResult.first().isTainted("ecx"));
    Assert.assertTrue(transformationResult.first().isTainted("ebx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** mul with a tainted output "eax": taint flows back to inputs "ecx" and "ebx". */
  @Test
  public void testTransformMul() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createMul(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "ebx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformMul(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    Assert.assertFalse(transformationResult.first().isTainted("eax"));
    Assert.assertTrue(transformationResult.first().isTainted("ecx"));
    Assert.assertTrue(transformationResult.first().isTainted("ebx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** mul by the constant 0 (second operand) always yields 0: the output taint is killed. */
  @Test
  public void testTransformMulFirstZero() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    // Original code wrapped the literal in a pointless String.valueOf("0"); "0" is equivalent.
    final ReilInstruction instruction = ReilHelpers.createMul(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "0", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformMul(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /** mul by the constant 0 (first operand) always yields 0: the output taint is killed. */
  @Test
  public void testTransformMulSecondZero() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    // Original code wrapped the literal in a pointless String.valueOf("0"); "0" is equivalent.
    final ReilInstruction instruction = ReilHelpers.createMul(0, OperandSize.DWORD, "0",
        OperandSize.DWORD, "ecx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformMul(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /** nop must leave the incoming taint state completely untouched. */
  @Test
  public void testTransformNop() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createNop(0);
    // NOTE(review): this feeds a NOP instruction through transformAdd, not a transformNop.
    // Looks like a copy-paste artifact; confirm whether a dedicated transformNop exists.
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformAdd(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().isEmpty());
  }

  /** or with a tainted output "eax": taint flows back to inputs, output is cleared. */
  @Test
  public void testTransformOr() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createOr(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "ebx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformOr(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /** or with an all-ones DWORD mask (second operand) forces the result: taint is killed. */
  @Test
  public void testTransformOrFirstAllBits() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createOr(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, String.valueOf(0xFFFFFFFFL), OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformOr(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /** or with an all-ones BYTE mask (first operand) forces the result: taint is killed. */
  @Test
  public void testTransformOrSecondAllBits() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createOr(0, OperandSize.BYTE,
        String.valueOf(0xFFL), OperandSize.BYTE, "ecx", OperandSize.BYTE, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformOr(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /** stm writes to memory, not to a register: the taint state must pass through unchanged. */
  @Test
  public void testTransformStm() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction =
        ReilHelpers.createStm(0, OperandSize.DWORD, "ecx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformStm(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().isEmpty());
  }

  /** str with a tainted output "eax": the source "ecx" becomes tainted, "eax" is cleared. */
  @Test
  public void testTransformStr() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction =
        ReilHelpers.createStr(0, OperandSize.DWORD, "ecx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformStr(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /** sub with a tainted output "eax": taint flows back to inputs "ecx" and "ebx". */
  @Test
  public void testTransformSub() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createSub(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "ebx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformSub(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    Assert.assertFalse(transformationResult.first().isTainted("eax"));
    Assert.assertTrue(transformationResult.first().isTainted("ecx"));
    Assert.assertTrue(transformationResult.first().isTainted("ebx"));
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** sub of a register from itself always yields 0: the output taint must be killed. */
  @Test
  public void testTransformSubIdenticalInput() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createSub(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "ecx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformSub(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** undef destroys its operand's value: the taint on "eax" must be cleared. */
  @Test
  public void testTransformUndef() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createUndef(0, OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformUndef(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** unknown must leave the incoming taint state untouched. */
  @Test
  public void testTransformUnknown() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createUnknown(0);
    // NOTE(review): this feeds an UNKNOWN instruction through transformUndef, not a
    // transformUnknown. Looks like a copy-paste artifact; confirm the intended transform.
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformUndef(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** xor with a tainted output "eax": taint flows back to inputs "ecx" and "ebx". */
  @Test
  public void testTransformXor() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createXor(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "ebx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformXor(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().contains("ebx"));
    Assert.assertTrue(transformationResult.first().getReadRegisters().contains("eax"));
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().contains("ecx"));
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
  }

  /** xor of a register with itself always yields 0: the output taint must be killed. */
  @Test
  public void testTransformXorSameOperands() {
    final RegisterTrackingTransformationProvider transformationProvider = createDefaultProvider();
    final ReilInstruction instruction = ReilHelpers.createXor(0, OperandSize.DWORD, "ecx",
        OperandSize.DWORD, "ecx", OperandSize.DWORD, "eax");
    final Pair<RegisterSetLatticeElement, RegisterSetLatticeElement> transformationResult =
        transformationProvider.transformXor(instruction, createTaintedState("eax"));
    Assert.assertNull(transformationResult.second());
    transformationResult.first().onInstructionExit();
    Assert.assertTrue(transformationResult.first().getTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getReadRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getNewlyTaintedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUpdatedRegisters().isEmpty());
    Assert.assertTrue(transformationResult.first().getUntaintedRegisters().contains("eax"));
  }

  /**
   * Builds the transformation provider used by most tests in this class: do not clear all
   * registers at calls, empty cleared-register set, third option false, upward analysis.
   * Extracted because the identical construction was repeated in nearly every test.
   */
  private static RegisterTrackingTransformationProvider createDefaultProvider() {
    return new RegisterTrackingTransformationProvider(
        new RegisterTrackingOptions(false, new TreeSet<String>(), false, AnalysisDirection.UP));
  }
}
googleapis/google-cloud-java
35,195
java-configdelivery/proto-google-cloud-configdelivery-v1beta/src/main/java/com/google/cloud/configdelivery/v1beta/DeleteReleaseRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/configdelivery/v1beta/config_delivery.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.configdelivery.v1beta; /** * * * <pre> * Message for deleting a Release * </pre> * * Protobuf type {@code google.cloud.configdelivery.v1beta.DeleteReleaseRequest} */ public final class DeleteReleaseRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.configdelivery.v1beta.DeleteReleaseRequest) DeleteReleaseRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeleteReleaseRequest.newBuilder() to construct. 
private DeleteReleaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeleteReleaseRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeleteReleaseRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto .internal_static_google_cloud_configdelivery_v1beta_DeleteReleaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto .internal_static_google_cloud_configdelivery_v1beta_DeleteReleaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest.class, com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. 
The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FORCE_FIELD_NUMBER = 3; private boolean force_ = false; /** * * * <pre> * Optional. If set to true, any variants of this release will also be * deleted. (Otherwise, the request will only work if the release has no * variants.) * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The force. 
*/
// NOTE(review): the javadoc closed above documents getForce(); it begins before this chunk.
@java.lang.Override
public boolean getForce() {
  return force_;
}

// Memoized result of isInitialized(): -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this proto3 message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes the message to the wire format. Default values (empty string,
// false) are skipped, matching proto3 serialization rules; field numbers are
// name=1, request_id=2, force=3.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
  }
  if (force_ != false) {
    output.writeBool(3, force_);
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes in memoizedSize) the serialized byte size; must
// mirror writeTo() exactly, including which default-valued fields are skipped.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
  }
  if (force_ != false) {
    size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

// Field-by-field equality over name, request_id, force, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest other =
      (com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest) obj;
  if (!getName().equals(other.getName())) return false;
  if (!getRequestId().equals(other.getRequestId())) return false;
  if (getForce() != other.getForce()) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash over the same fields as equals(); multipliers (19/37/53/29)
// follow the protoc-generated hashing convention.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
  hash = (53 * hash) + getRequestId().hashCode();
  hash = (37 * hash) + FORCE_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// --- Standard protoc-generated parseFrom overloads: each delegates to PARSER
// --- (or to GeneratedMessageV3 IO helpers for stream-based variants).

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// --- Builder factory methods.

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom() when this is the (all-defaults) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 * Message for deleting a Release.
 *
 * Protobuf type {@code google.cloud.configdelivery.v1beta.DeleteReleaseRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.configdelivery.v1beta.DeleteReleaseRequest)
    com.google.cloud.configdelivery.v1beta.DeleteReleaseRequestOrBuilder {

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto
        .internal_static_google_cloud_configdelivery_v1beta_DeleteReleaseRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto
        .internal_static_google_cloud_configdelivery_v1beta_DeleteReleaseRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest.class,
            com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest.Builder.class);
  }

  // Construct using com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its proto3 default and clears the presence bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    name_ = "";
    requestId_ = "";
    force_ = false;
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto
        .internal_static_google_cloud_configdelivery_v1beta_DeleteReleaseRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest getDefaultInstanceForType() {
    return com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest build() {
    com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest buildPartial() {
    com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest result =
        new com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only fields whose presence bit is set (0x1=name, 0x2=request_id, 0x4=force).
  private void buildPartial0(com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.name_ = name_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.requestId_ = requestId_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.force_ = force_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest) {
      return mergeFrom((com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Proto3 merge semantics: only non-default fields of `other` overwrite this builder.
  public Builder mergeFrom(com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest other) {
    if (other == com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest.getDefaultInstance())
      return this;
    if (!other.getName().isEmpty()) {
      name_ = other.name_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (!other.getRequestId().isEmpty()) {
      requestId_ = other.requestId_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    if (other.getForce() != false) {
      setForce(other.getForce());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Streaming parse: dispatches on wire tags (10=name, 18=request_id, 24=force);
  // unknown tags are preserved via parseUnknownField.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              name_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              requestId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          case 24:
            {
              force_ = input.readBool();
              bitField0_ |= 0x00000004;
              break;
            } // case 24
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  // Stored as String or ByteString; lazily converted (see getName/getNameBytes).
  private java.lang.Object name_ = "";

  /**
   * Required. Name of the resource.
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   *
   * @return The name.
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s; // cache the decoded form
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   * Required. Name of the resource.
   *
   * @return The bytes for name.
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   * Required. Name of the resource.
   *
   * @param value The name to set.
   * @return This builder for chaining.
   */
  public Builder setName(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   * Required. Name of the resource.
   *
   * @return This builder for chaining.
   */
  public Builder clearName() {
    name_ = getDefaultInstance().getName();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /**
   * Required. Name of the resource.
   *
   * @param value The bytes for name to set.
   * @return This builder for chaining.
   */
  public Builder setNameBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  // Stored as String or ByteString; lazily converted like name_.
  private java.lang.Object requestId_ = "";

  /**
   * Optional. An optional request ID to identify requests, enabling the server
   * to ignore retries of an already-completed request for at least 60 minutes.
   * Must be a valid UUID; the zero UUID
   * (00000000-0000-0000-0000-000000000000) is not supported.
   *
   * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }</code>
   *
   * @return The requestId.
   */
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   * Optional. Idempotency request ID (see {@link #getRequestId()}).
   *
   * @return The bytes for requestId.
   */
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   * Optional. Idempotency request ID (see {@link #getRequestId()}).
   *
   * @param value The requestId to set.
   * @return This builder for chaining.
   */
  public Builder setRequestId(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    requestId_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   * Optional. Idempotency request ID (see {@link #getRequestId()}).
   *
   * @return This builder for chaining.
   */
  public Builder clearRequestId() {
    requestId_ = getDefaultInstance().getRequestId();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   * Optional. Idempotency request ID (see {@link #getRequestId()}).
   *
   * @param value The bytes for requestId to set.
   * @return This builder for chaining.
   */
  public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    requestId_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  private boolean force_;

  /**
   * Optional. If set to true, any variants of this release will also be
   * deleted. (Otherwise, the request will only work if the release has no
   * variants.)
   *
   * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The force.
   */
  @java.lang.Override
  public boolean getForce() {
    return force_;
  }

  /**
   * Optional. If set to true, any variants of this release will also be
   * deleted. (Otherwise, the request will only work if the release has no
   * variants.)
   *
   * @param value The force to set.
   * @return This builder for chaining.
   */
  public Builder setForce(boolean value) {
    force_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  /**
   * Optional. If set to true, any variants of this release will also be
   * deleted. (Otherwise, the request will only work if the release has no
   * variants.)
   *
   * @return This builder for chaining.
   */
  public Builder clearForce() {
    bitField0_ = (bitField0_ & ~0x00000004);
    force_ = false;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.configdelivery.v1beta.DeleteReleaseRequest)
}

// @@protoc_insertion_point(class_scope:google.cloud.configdelivery.v1beta.DeleteReleaseRequest)
private static final com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest();
}

public static com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Singleton parser; returns a partially-parsed message attached to the
// exception when parsing fails, per protobuf convention.
private static final com.google.protobuf.Parser<DeleteReleaseRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteReleaseRequest>() {
      @java.lang.Override
      public DeleteReleaseRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<DeleteReleaseRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<DeleteReleaseRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.configdelivery.v1beta.DeleteReleaseRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ---- NOTE(review): the three lines below are a dataset/concatenation artifact
// ---- (repo id, file size, file path of the next embedded file), not Java code.
// googleapis/google-api-java-client-services
// 35,240
// clients/google-api-services-container/v1/1.31.0/com/google/api/services/container/model/ClusterUpdate.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.container.model; /** * ClusterUpdate describes an update to the cluster. Exactly one update can be applied to a cluster * with each request, so at most one field can be provided. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Kubernetes Engine API. For a detailed explanation * see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class ClusterUpdate extends com.google.api.client.json.GenericJson { /** * Configurations for the various addons available to run in the cluster. * The value may be {@code null}. */ @com.google.api.client.util.Key private AddonsConfig desiredAddonsConfig; /** * The desired authenticator groups config for the cluster. * The value may be {@code null}. */ @com.google.api.client.util.Key private AuthenticatorGroupsConfig desiredAuthenticatorGroupsConfig; /** * The desired configuration options for the Binary Authorization feature. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private BinaryAuthorization desiredBinaryAuthorization; /** * Cluster-level autoscaling configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private ClusterAutoscaling desiredClusterAutoscaling; /** * Configuration of etcd encryption. * The value may be {@code null}. */ @com.google.api.client.util.Key private DatabaseEncryption desiredDatabaseEncryption; /** * The desired datapath provider for the cluster. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredDatapathProvider; /** * The desired status of whether to disable default sNAT for this cluster. * The value may be {@code null}. */ @com.google.api.client.util.Key private DefaultSnatStatus desiredDefaultSnatStatus; /** * DNSConfig contains clusterDNS config for this cluster. * The value may be {@code null}. */ @com.google.api.client.util.Key private DNSConfig desiredDnsConfig; /** * The desired GCFS config for the cluster * The value may be {@code null}. */ @com.google.api.client.util.Key private GcfsConfig desiredGcfsConfig; /** * The desired Identity Service component configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private IdentityServiceConfig desiredIdentityServiceConfig; /** * The desired image type for the node pool. NOTE: Set the "desired_node_pool" field as well. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredImageType; /** * The desired config of Intra-node visibility. * The value may be {@code null}. */ @com.google.api.client.util.Key private IntraNodeVisibilityConfig desiredIntraNodeVisibilityConfig; /** * The desired L4 Internal Load Balancer Subsetting configuration. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private ILBSubsettingConfig desiredL4ilbSubsettingConfig; /** * The desired list of Google Compute Engine * [zones](https://cloud.google.com/compute/docs/zones#available) in which the cluster's nodes * should be located. This list must always include the cluster's primary zone. Warning: changing * cluster locations will update the locations of all node pools and will result in nodes being * added and/or removed. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> desiredLocations; /** * The desired logging configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private LoggingConfig desiredLoggingConfig; /** * The logging service the cluster should use to write logs. Currently available options: * * `logging.googleapis.com/kubernetes` - The Cloud Logging service with a Kubernetes-native * resource model * `logging.googleapis.com` - The legacy Cloud Logging service (no longer * available as of GKE 1.15). * `none` - no logs will be exported from the cluster. If left as an * empty string,`logging.googleapis.com/kubernetes` will be used for GKE 1.14+ or * `logging.googleapis.com` for earlier versions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredLoggingService; /** * The desired configuration options for master authorized networks feature. * The value may be {@code null}. */ @com.google.api.client.util.Key private MasterAuthorizedNetworksConfig desiredMasterAuthorizedNetworksConfig; /** * The Kubernetes version to change the master to. 
Users may specify either explicit versions * offered by Kubernetes Engine or version aliases, which have the following behavior: - "latest": * picks the highest valid Kubernetes version - "1.X": picks the highest valid patch+gke.N patch * in the 1.X version - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version - * "1.X.Y-gke.N": picks an explicit Kubernetes version - "-": picks the default Kubernetes version * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredMasterVersion; /** * Configuration for issuance of mTLS keys and certificates to Kubernetes pods. * The value may be {@code null}. */ @com.google.api.client.util.Key private MeshCertificates desiredMeshCertificates; /** * The desired monitoring configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private MonitoringConfig desiredMonitoringConfig; /** * The monitoring service the cluster should use to write metrics. Currently available options: * * "monitoring.googleapis.com/kubernetes" - The Cloud Monitoring service with a Kubernetes-native * resource model * `monitoring.googleapis.com` - The legacy Cloud Monitoring service (no longer * available as of GKE 1.15). * `none` - No metrics will be exported from the cluster. If left as * an empty string,`monitoring.googleapis.com/kubernetes` will be used for GKE 1.14+ or * `monitoring.googleapis.com` for earlier versions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredMonitoringService; /** * The desired network tags that apply to all auto-provisioned node pools in autopilot clusters * and node auto-provisioning enabled clusters. * The value may be {@code null}. */ @com.google.api.client.util.Key private NetworkTags desiredNodePoolAutoConfigNetworkTags; /** * Autoscaler configuration for the node pool specified in desired_node_pool_id. 
If there is only * one pool in the cluster and desired_node_pool_id is not provided then the change applies to * that single node pool. * The value may be {@code null}. */ @com.google.api.client.util.Key private NodePoolAutoscaling desiredNodePoolAutoscaling; /** * The node pool to be upgraded. This field is mandatory if "desired_node_version", * "desired_image_family" or "desired_node_pool_autoscaling" is specified and there is more than * one node pool on the cluster. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredNodePoolId; /** * The Kubernetes version to change the nodes to (typically an upgrade). Users may specify either * explicit versions offered by Kubernetes Engine or version aliases, which have the following * behavior: - "latest": picks the highest valid Kubernetes version - "1.X": picks the highest * valid patch+gke.N patch in the 1.X version - "1.X.Y": picks the highest valid gke.N patch in * the 1.X.Y version - "1.X.Y-gke.N": picks an explicit Kubernetes version - "-": picks the * Kubernetes master version * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredNodeVersion; /** * The desired notification configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private NotificationConfig desiredNotificationConfig; /** * The desired private cluster configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private PrivateClusterConfig desiredPrivateClusterConfig; /** * The desired state of IPv6 connectivity to Google Services. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String desiredPrivateIpv6GoogleAccess; /** * The desired release channel configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private ReleaseChannel desiredReleaseChannel; /** * The desired configuration for exporting resource usage. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private ResourceUsageExportConfig desiredResourceUsageExportConfig; /** * ServiceExternalIPsConfig specifies the config for the use of Services with ExternalIPs field. * The value may be {@code null}. */ @com.google.api.client.util.Key private ServiceExternalIPsConfig desiredServiceExternalIpsConfig; /** * Configuration for Shielded Nodes. * The value may be {@code null}. */ @com.google.api.client.util.Key private ShieldedNodes desiredShieldedNodes; /** * Cluster-level Vertical Pod Autoscaling configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private VerticalPodAutoscaling desiredVerticalPodAutoscaling; /** * Configuration for Workload Identity. * The value may be {@code null}. */ @com.google.api.client.util.Key private WorkloadIdentityConfig desiredWorkloadIdentityConfig; /** * Configurations for the various addons available to run in the cluster. * @return value or {@code null} for none */ public AddonsConfig getDesiredAddonsConfig() { return desiredAddonsConfig; } /** * Configurations for the various addons available to run in the cluster. * @param desiredAddonsConfig desiredAddonsConfig or {@code null} for none */ public ClusterUpdate setDesiredAddonsConfig(AddonsConfig desiredAddonsConfig) { this.desiredAddonsConfig = desiredAddonsConfig; return this; } /** * The desired authenticator groups config for the cluster. * @return value or {@code null} for none */ public AuthenticatorGroupsConfig getDesiredAuthenticatorGroupsConfig() { return desiredAuthenticatorGroupsConfig; } /** * The desired authenticator groups config for the cluster. 
* @param desiredAuthenticatorGroupsConfig desiredAuthenticatorGroupsConfig or {@code null} for none */ public ClusterUpdate setDesiredAuthenticatorGroupsConfig(AuthenticatorGroupsConfig desiredAuthenticatorGroupsConfig) { this.desiredAuthenticatorGroupsConfig = desiredAuthenticatorGroupsConfig; return this; } /** * The desired configuration options for the Binary Authorization feature. * @return value or {@code null} for none */ public BinaryAuthorization getDesiredBinaryAuthorization() { return desiredBinaryAuthorization; } /** * The desired configuration options for the Binary Authorization feature. * @param desiredBinaryAuthorization desiredBinaryAuthorization or {@code null} for none */ public ClusterUpdate setDesiredBinaryAuthorization(BinaryAuthorization desiredBinaryAuthorization) { this.desiredBinaryAuthorization = desiredBinaryAuthorization; return this; } /** * Cluster-level autoscaling configuration. * @return value or {@code null} for none */ public ClusterAutoscaling getDesiredClusterAutoscaling() { return desiredClusterAutoscaling; } /** * Cluster-level autoscaling configuration. * @param desiredClusterAutoscaling desiredClusterAutoscaling or {@code null} for none */ public ClusterUpdate setDesiredClusterAutoscaling(ClusterAutoscaling desiredClusterAutoscaling) { this.desiredClusterAutoscaling = desiredClusterAutoscaling; return this; } /** * Configuration of etcd encryption. * @return value or {@code null} for none */ public DatabaseEncryption getDesiredDatabaseEncryption() { return desiredDatabaseEncryption; } /** * Configuration of etcd encryption. * @param desiredDatabaseEncryption desiredDatabaseEncryption or {@code null} for none */ public ClusterUpdate setDesiredDatabaseEncryption(DatabaseEncryption desiredDatabaseEncryption) { this.desiredDatabaseEncryption = desiredDatabaseEncryption; return this; } /** * The desired datapath provider for the cluster. 
* @return value or {@code null} for none */ public java.lang.String getDesiredDatapathProvider() { return desiredDatapathProvider; } /** * The desired datapath provider for the cluster. * @param desiredDatapathProvider desiredDatapathProvider or {@code null} for none */ public ClusterUpdate setDesiredDatapathProvider(java.lang.String desiredDatapathProvider) { this.desiredDatapathProvider = desiredDatapathProvider; return this; } /** * The desired status of whether to disable default sNAT for this cluster. * @return value or {@code null} for none */ public DefaultSnatStatus getDesiredDefaultSnatStatus() { return desiredDefaultSnatStatus; } /** * The desired status of whether to disable default sNAT for this cluster. * @param desiredDefaultSnatStatus desiredDefaultSnatStatus or {@code null} for none */ public ClusterUpdate setDesiredDefaultSnatStatus(DefaultSnatStatus desiredDefaultSnatStatus) { this.desiredDefaultSnatStatus = desiredDefaultSnatStatus; return this; } /** * DNSConfig contains clusterDNS config for this cluster. * @return value or {@code null} for none */ public DNSConfig getDesiredDnsConfig() { return desiredDnsConfig; } /** * DNSConfig contains clusterDNS config for this cluster. * @param desiredDnsConfig desiredDnsConfig or {@code null} for none */ public ClusterUpdate setDesiredDnsConfig(DNSConfig desiredDnsConfig) { this.desiredDnsConfig = desiredDnsConfig; return this; } /** * The desired GCFS config for the cluster * @return value or {@code null} for none */ public GcfsConfig getDesiredGcfsConfig() { return desiredGcfsConfig; } /** * The desired GCFS config for the cluster * @param desiredGcfsConfig desiredGcfsConfig or {@code null} for none */ public ClusterUpdate setDesiredGcfsConfig(GcfsConfig desiredGcfsConfig) { this.desiredGcfsConfig = desiredGcfsConfig; return this; } /** * The desired Identity Service component configuration. 
* @return value or {@code null} for none */ public IdentityServiceConfig getDesiredIdentityServiceConfig() { return desiredIdentityServiceConfig; } /** * The desired Identity Service component configuration. * @param desiredIdentityServiceConfig desiredIdentityServiceConfig or {@code null} for none */ public ClusterUpdate setDesiredIdentityServiceConfig(IdentityServiceConfig desiredIdentityServiceConfig) { this.desiredIdentityServiceConfig = desiredIdentityServiceConfig; return this; } /** * The desired image type for the node pool. NOTE: Set the "desired_node_pool" field as well. * @return value or {@code null} for none */ public java.lang.String getDesiredImageType() { return desiredImageType; } /** * The desired image type for the node pool. NOTE: Set the "desired_node_pool" field as well. * @param desiredImageType desiredImageType or {@code null} for none */ public ClusterUpdate setDesiredImageType(java.lang.String desiredImageType) { this.desiredImageType = desiredImageType; return this; } /** * The desired config of Intra-node visibility. * @return value or {@code null} for none */ public IntraNodeVisibilityConfig getDesiredIntraNodeVisibilityConfig() { return desiredIntraNodeVisibilityConfig; } /** * The desired config of Intra-node visibility. * @param desiredIntraNodeVisibilityConfig desiredIntraNodeVisibilityConfig or {@code null} for none */ public ClusterUpdate setDesiredIntraNodeVisibilityConfig(IntraNodeVisibilityConfig desiredIntraNodeVisibilityConfig) { this.desiredIntraNodeVisibilityConfig = desiredIntraNodeVisibilityConfig; return this; } /** * The desired L4 Internal Load Balancer Subsetting configuration. * @return value or {@code null} for none */ public ILBSubsettingConfig getDesiredL4ilbSubsettingConfig() { return desiredL4ilbSubsettingConfig; } /** * The desired L4 Internal Load Balancer Subsetting configuration. 
* @param desiredL4ilbSubsettingConfig desiredL4ilbSubsettingConfig or {@code null} for none */ public ClusterUpdate setDesiredL4ilbSubsettingConfig(ILBSubsettingConfig desiredL4ilbSubsettingConfig) { this.desiredL4ilbSubsettingConfig = desiredL4ilbSubsettingConfig; return this; } /** * The desired list of Google Compute Engine * [zones](https://cloud.google.com/compute/docs/zones#available) in which the cluster's nodes * should be located. This list must always include the cluster's primary zone. Warning: changing * cluster locations will update the locations of all node pools and will result in nodes being * added and/or removed. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getDesiredLocations() { return desiredLocations; } /** * The desired list of Google Compute Engine * [zones](https://cloud.google.com/compute/docs/zones#available) in which the cluster's nodes * should be located. This list must always include the cluster's primary zone. Warning: changing * cluster locations will update the locations of all node pools and will result in nodes being * added and/or removed. * @param desiredLocations desiredLocations or {@code null} for none */ public ClusterUpdate setDesiredLocations(java.util.List<java.lang.String> desiredLocations) { this.desiredLocations = desiredLocations; return this; } /** * The desired logging configuration. * @return value or {@code null} for none */ public LoggingConfig getDesiredLoggingConfig() { return desiredLoggingConfig; } /** * The desired logging configuration. * @param desiredLoggingConfig desiredLoggingConfig or {@code null} for none */ public ClusterUpdate setDesiredLoggingConfig(LoggingConfig desiredLoggingConfig) { this.desiredLoggingConfig = desiredLoggingConfig; return this; } /** * The logging service the cluster should use to write logs. 
Currently available options: * * `logging.googleapis.com/kubernetes` - The Cloud Logging service with a Kubernetes-native * resource model * `logging.googleapis.com` - The legacy Cloud Logging service (no longer * available as of GKE 1.15). * `none` - no logs will be exported from the cluster. If left as an * empty string,`logging.googleapis.com/kubernetes` will be used for GKE 1.14+ or * `logging.googleapis.com` for earlier versions. * @return value or {@code null} for none */ public java.lang.String getDesiredLoggingService() { return desiredLoggingService; } /** * The logging service the cluster should use to write logs. Currently available options: * * `logging.googleapis.com/kubernetes` - The Cloud Logging service with a Kubernetes-native * resource model * `logging.googleapis.com` - The legacy Cloud Logging service (no longer * available as of GKE 1.15). * `none` - no logs will be exported from the cluster. If left as an * empty string,`logging.googleapis.com/kubernetes` will be used for GKE 1.14+ or * `logging.googleapis.com` for earlier versions. * @param desiredLoggingService desiredLoggingService or {@code null} for none */ public ClusterUpdate setDesiredLoggingService(java.lang.String desiredLoggingService) { this.desiredLoggingService = desiredLoggingService; return this; } /** * The desired configuration options for master authorized networks feature. * @return value or {@code null} for none */ public MasterAuthorizedNetworksConfig getDesiredMasterAuthorizedNetworksConfig() { return desiredMasterAuthorizedNetworksConfig; } /** * The desired configuration options for master authorized networks feature. 
* @param desiredMasterAuthorizedNetworksConfig desiredMasterAuthorizedNetworksConfig or {@code null} for none */ public ClusterUpdate setDesiredMasterAuthorizedNetworksConfig(MasterAuthorizedNetworksConfig desiredMasterAuthorizedNetworksConfig) { this.desiredMasterAuthorizedNetworksConfig = desiredMasterAuthorizedNetworksConfig; return this; } /** * The Kubernetes version to change the master to. Users may specify either explicit versions * offered by Kubernetes Engine or version aliases, which have the following behavior: - "latest": * picks the highest valid Kubernetes version - "1.X": picks the highest valid patch+gke.N patch * in the 1.X version - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version - * "1.X.Y-gke.N": picks an explicit Kubernetes version - "-": picks the default Kubernetes version * @return value or {@code null} for none */ public java.lang.String getDesiredMasterVersion() { return desiredMasterVersion; } /** * The Kubernetes version to change the master to. Users may specify either explicit versions * offered by Kubernetes Engine or version aliases, which have the following behavior: - "latest": * picks the highest valid Kubernetes version - "1.X": picks the highest valid patch+gke.N patch * in the 1.X version - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version - * "1.X.Y-gke.N": picks an explicit Kubernetes version - "-": picks the default Kubernetes version * @param desiredMasterVersion desiredMasterVersion or {@code null} for none */ public ClusterUpdate setDesiredMasterVersion(java.lang.String desiredMasterVersion) { this.desiredMasterVersion = desiredMasterVersion; return this; } /** * Configuration for issuance of mTLS keys and certificates to Kubernetes pods. * @return value or {@code null} for none */ public MeshCertificates getDesiredMeshCertificates() { return desiredMeshCertificates; } /** * Configuration for issuance of mTLS keys and certificates to Kubernetes pods. 
* @param desiredMeshCertificates desiredMeshCertificates or {@code null} for none */ public ClusterUpdate setDesiredMeshCertificates(MeshCertificates desiredMeshCertificates) { this.desiredMeshCertificates = desiredMeshCertificates; return this; } /** * The desired monitoring configuration. * @return value or {@code null} for none */ public MonitoringConfig getDesiredMonitoringConfig() { return desiredMonitoringConfig; } /** * The desired monitoring configuration. * @param desiredMonitoringConfig desiredMonitoringConfig or {@code null} for none */ public ClusterUpdate setDesiredMonitoringConfig(MonitoringConfig desiredMonitoringConfig) { this.desiredMonitoringConfig = desiredMonitoringConfig; return this; } /** * The monitoring service the cluster should use to write metrics. Currently available options: * * "monitoring.googleapis.com/kubernetes" - The Cloud Monitoring service with a Kubernetes-native * resource model * `monitoring.googleapis.com` - The legacy Cloud Monitoring service (no longer * available as of GKE 1.15). * `none` - No metrics will be exported from the cluster. If left as * an empty string,`monitoring.googleapis.com/kubernetes` will be used for GKE 1.14+ or * `monitoring.googleapis.com` for earlier versions. * @return value or {@code null} for none */ public java.lang.String getDesiredMonitoringService() { return desiredMonitoringService; } /** * The monitoring service the cluster should use to write metrics. Currently available options: * * "monitoring.googleapis.com/kubernetes" - The Cloud Monitoring service with a Kubernetes-native * resource model * `monitoring.googleapis.com` - The legacy Cloud Monitoring service (no longer * available as of GKE 1.15). * `none` - No metrics will be exported from the cluster. If left as * an empty string,`monitoring.googleapis.com/kubernetes` will be used for GKE 1.14+ or * `monitoring.googleapis.com` for earlier versions. 
* @param desiredMonitoringService desiredMonitoringService or {@code null} for none */ public ClusterUpdate setDesiredMonitoringService(java.lang.String desiredMonitoringService) { this.desiredMonitoringService = desiredMonitoringService; return this; } /** * The desired network tags that apply to all auto-provisioned node pools in autopilot clusters * and node auto-provisioning enabled clusters. * @return value or {@code null} for none */ public NetworkTags getDesiredNodePoolAutoConfigNetworkTags() { return desiredNodePoolAutoConfigNetworkTags; } /** * The desired network tags that apply to all auto-provisioned node pools in autopilot clusters * and node auto-provisioning enabled clusters. * @param desiredNodePoolAutoConfigNetworkTags desiredNodePoolAutoConfigNetworkTags or {@code null} for none */ public ClusterUpdate setDesiredNodePoolAutoConfigNetworkTags(NetworkTags desiredNodePoolAutoConfigNetworkTags) { this.desiredNodePoolAutoConfigNetworkTags = desiredNodePoolAutoConfigNetworkTags; return this; } /** * Autoscaler configuration for the node pool specified in desired_node_pool_id. If there is only * one pool in the cluster and desired_node_pool_id is not provided then the change applies to * that single node pool. * @return value or {@code null} for none */ public NodePoolAutoscaling getDesiredNodePoolAutoscaling() { return desiredNodePoolAutoscaling; } /** * Autoscaler configuration for the node pool specified in desired_node_pool_id. If there is only * one pool in the cluster and desired_node_pool_id is not provided then the change applies to * that single node pool. * @param desiredNodePoolAutoscaling desiredNodePoolAutoscaling or {@code null} for none */ public ClusterUpdate setDesiredNodePoolAutoscaling(NodePoolAutoscaling desiredNodePoolAutoscaling) { this.desiredNodePoolAutoscaling = desiredNodePoolAutoscaling; return this; } /** * The node pool to be upgraded. 
This field is mandatory if "desired_node_version", * "desired_image_family" or "desired_node_pool_autoscaling" is specified and there is more than * one node pool on the cluster. * @return value or {@code null} for none */ public java.lang.String getDesiredNodePoolId() { return desiredNodePoolId; } /** * The node pool to be upgraded. This field is mandatory if "desired_node_version", * "desired_image_family" or "desired_node_pool_autoscaling" is specified and there is more than * one node pool on the cluster. * @param desiredNodePoolId desiredNodePoolId or {@code null} for none */ public ClusterUpdate setDesiredNodePoolId(java.lang.String desiredNodePoolId) { this.desiredNodePoolId = desiredNodePoolId; return this; } /** * The Kubernetes version to change the nodes to (typically an upgrade). Users may specify either * explicit versions offered by Kubernetes Engine or version aliases, which have the following * behavior: - "latest": picks the highest valid Kubernetes version - "1.X": picks the highest * valid patch+gke.N patch in the 1.X version - "1.X.Y": picks the highest valid gke.N patch in * the 1.X.Y version - "1.X.Y-gke.N": picks an explicit Kubernetes version - "-": picks the * Kubernetes master version * @return value or {@code null} for none */ public java.lang.String getDesiredNodeVersion() { return desiredNodeVersion; } /** * The Kubernetes version to change the nodes to (typically an upgrade). 
Users may specify either * explicit versions offered by Kubernetes Engine or version aliases, which have the following * behavior: - "latest": picks the highest valid Kubernetes version - "1.X": picks the highest * valid patch+gke.N patch in the 1.X version - "1.X.Y": picks the highest valid gke.N patch in * the 1.X.Y version - "1.X.Y-gke.N": picks an explicit Kubernetes version - "-": picks the * Kubernetes master version * @param desiredNodeVersion desiredNodeVersion or {@code null} for none */ public ClusterUpdate setDesiredNodeVersion(java.lang.String desiredNodeVersion) { this.desiredNodeVersion = desiredNodeVersion; return this; } /** * The desired notification configuration. * @return value or {@code null} for none */ public NotificationConfig getDesiredNotificationConfig() { return desiredNotificationConfig; } /** * The desired notification configuration. * @param desiredNotificationConfig desiredNotificationConfig or {@code null} for none */ public ClusterUpdate setDesiredNotificationConfig(NotificationConfig desiredNotificationConfig) { this.desiredNotificationConfig = desiredNotificationConfig; return this; } /** * The desired private cluster configuration. * @return value or {@code null} for none */ public PrivateClusterConfig getDesiredPrivateClusterConfig() { return desiredPrivateClusterConfig; } /** * The desired private cluster configuration. * @param desiredPrivateClusterConfig desiredPrivateClusterConfig or {@code null} for none */ public ClusterUpdate setDesiredPrivateClusterConfig(PrivateClusterConfig desiredPrivateClusterConfig) { this.desiredPrivateClusterConfig = desiredPrivateClusterConfig; return this; } /** * The desired state of IPv6 connectivity to Google Services. * @return value or {@code null} for none */ public java.lang.String getDesiredPrivateIpv6GoogleAccess() { return desiredPrivateIpv6GoogleAccess; } /** * The desired state of IPv6 connectivity to Google Services. 
* @param desiredPrivateIpv6GoogleAccess desiredPrivateIpv6GoogleAccess or {@code null} for none */ public ClusterUpdate setDesiredPrivateIpv6GoogleAccess(java.lang.String desiredPrivateIpv6GoogleAccess) { this.desiredPrivateIpv6GoogleAccess = desiredPrivateIpv6GoogleAccess; return this; } /** * The desired release channel configuration. * @return value or {@code null} for none */ public ReleaseChannel getDesiredReleaseChannel() { return desiredReleaseChannel; } /** * The desired release channel configuration. * @param desiredReleaseChannel desiredReleaseChannel or {@code null} for none */ public ClusterUpdate setDesiredReleaseChannel(ReleaseChannel desiredReleaseChannel) { this.desiredReleaseChannel = desiredReleaseChannel; return this; } /** * The desired configuration for exporting resource usage. * @return value or {@code null} for none */ public ResourceUsageExportConfig getDesiredResourceUsageExportConfig() { return desiredResourceUsageExportConfig; } /** * The desired configuration for exporting resource usage. * @param desiredResourceUsageExportConfig desiredResourceUsageExportConfig or {@code null} for none */ public ClusterUpdate setDesiredResourceUsageExportConfig(ResourceUsageExportConfig desiredResourceUsageExportConfig) { this.desiredResourceUsageExportConfig = desiredResourceUsageExportConfig; return this; } /** * ServiceExternalIPsConfig specifies the config for the use of Services with ExternalIPs field. * @return value or {@code null} for none */ public ServiceExternalIPsConfig getDesiredServiceExternalIpsConfig() { return desiredServiceExternalIpsConfig; } /** * ServiceExternalIPsConfig specifies the config for the use of Services with ExternalIPs field. 
* @param desiredServiceExternalIpsConfig desiredServiceExternalIpsConfig or {@code null} for none */ public ClusterUpdate setDesiredServiceExternalIpsConfig(ServiceExternalIPsConfig desiredServiceExternalIpsConfig) { this.desiredServiceExternalIpsConfig = desiredServiceExternalIpsConfig; return this; } /** * Configuration for Shielded Nodes. * @return value or {@code null} for none */ public ShieldedNodes getDesiredShieldedNodes() { return desiredShieldedNodes; } /** * Configuration for Shielded Nodes. * @param desiredShieldedNodes desiredShieldedNodes or {@code null} for none */ public ClusterUpdate setDesiredShieldedNodes(ShieldedNodes desiredShieldedNodes) { this.desiredShieldedNodes = desiredShieldedNodes; return this; } /** * Cluster-level Vertical Pod Autoscaling configuration. * @return value or {@code null} for none */ public VerticalPodAutoscaling getDesiredVerticalPodAutoscaling() { return desiredVerticalPodAutoscaling; } /** * Cluster-level Vertical Pod Autoscaling configuration. * @param desiredVerticalPodAutoscaling desiredVerticalPodAutoscaling or {@code null} for none */ public ClusterUpdate setDesiredVerticalPodAutoscaling(VerticalPodAutoscaling desiredVerticalPodAutoscaling) { this.desiredVerticalPodAutoscaling = desiredVerticalPodAutoscaling; return this; } /** * Configuration for Workload Identity. * @return value or {@code null} for none */ public WorkloadIdentityConfig getDesiredWorkloadIdentityConfig() { return desiredWorkloadIdentityConfig; } /** * Configuration for Workload Identity. 
* @param desiredWorkloadIdentityConfig desiredWorkloadIdentityConfig or {@code null} for none */ public ClusterUpdate setDesiredWorkloadIdentityConfig(WorkloadIdentityConfig desiredWorkloadIdentityConfig) { this.desiredWorkloadIdentityConfig = desiredWorkloadIdentityConfig; return this; } @Override public ClusterUpdate set(String fieldName, Object value) { return (ClusterUpdate) super.set(fieldName, value); } @Override public ClusterUpdate clone() { return (ClusterUpdate) super.clone(); } }
googleapis/google-cloud-java
35,420
java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3/stub/IntentsStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.cx.v3.stub; import static com.google.cloud.dialogflow.cx.v3.IntentsClient.ListIntentsPagedResponse; import static com.google.cloud.dialogflow.cx.v3.IntentsClient.ListLocationsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import 
com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dialogflow.cx.v3.CreateIntentRequest; import com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest; import com.google.cloud.dialogflow.cx.v3.ExportIntentsMetadata; import com.google.cloud.dialogflow.cx.v3.ExportIntentsRequest; import com.google.cloud.dialogflow.cx.v3.ExportIntentsResponse; import com.google.cloud.dialogflow.cx.v3.GetIntentRequest; import com.google.cloud.dialogflow.cx.v3.ImportIntentsMetadata; import com.google.cloud.dialogflow.cx.v3.ImportIntentsRequest; import com.google.cloud.dialogflow.cx.v3.ImportIntentsResponse; import com.google.cloud.dialogflow.cx.v3.Intent; import com.google.cloud.dialogflow.cx.v3.ListIntentsRequest; import com.google.cloud.dialogflow.cx.v3.ListIntentsResponse; import com.google.cloud.dialogflow.cx.v3.UpdateIntentRequest; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link IntentsStub}. 
* * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (dialogflow.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getIntent: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder(); * intentsSettingsBuilder * .getIntentSettings() * .setRetrySettings( * intentsSettingsBuilder * .getIntentSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * IntentsStubSettings intentsSettings = intentsSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. 
* * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for importIntents: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * intentsSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @Generated("by gapic-generator-java") public class IntentsStubSettings extends StubSettings<IntentsStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/cloud-platform") .add("https://www.googleapis.com/auth/dialogflow") .build(); private final PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings; private final UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings; private final UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings; private final UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings; private final UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings; private final UnaryCallSettings<ImportIntentsRequest, Operation> importIntentsSettings; private final OperationCallSettings< ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata> importIntentsOperationSettings; private final UnaryCallSettings<ExportIntentsRequest, Operation> exportIntentsSettings; private final OperationCallSettings< ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata> exportIntentsOperationSettings; private final PagedCallSettings< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings; private static final PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent> LIST_INTENTS_PAGE_STR_DESC = new PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent>() { @Override public String emptyToken() { return ""; } @Override public ListIntentsRequest injectToken(ListIntentsRequest payload, String token) { return ListIntentsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListIntentsRequest injectPageSize(ListIntentsRequest payload, int pageSize) { return ListIntentsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListIntentsRequest payload) { return 
// NOTE(review): machine-generated GAPIC settings class ("DO NOT EDIT") for the
// Dialogflow Intents service. Comments below are review annotations only; all
// code tokens are unchanged. This chunk opens mid-way through the
// LIST_INTENTS_PAGE_STR_DESC page descriptor (its head is above this chunk).
payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListIntentsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Intent> extractResources(ListIntentsResponse payload) {
              return payload.getIntentsList();
            }
          };

  // Page descriptor: teaches the gax paging machinery how to read/write page
  // tokens and page sizes on ListLocations requests/responses.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };

  // Factory wrapping the unary ListIntents callable into a lazily-paged
  // response future, driven by the corresponding page descriptor.
  private static final PagedListResponseFactory<
          ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
      LIST_INTENTS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>() {
            @Override
            public ApiFuture<ListIntentsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListIntentsRequest, ListIntentsResponse> callable,
                ListIntentsRequest request,
                ApiCallContext context,
                ApiFuture<ListIntentsResponse> futureResponse) {
              PageContext<ListIntentsRequest, ListIntentsResponse, Intent> pageContext =
                  PageContext.create(callable, LIST_INTENTS_PAGE_STR_DESC, request, context);
              return ListIntentsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  // Same pattern as above, for ListLocations.
  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to listIntents. */
  public PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
      listIntentsSettings() {
    return listIntentsSettings;
  }

  /** Returns the object with the settings used for calls to getIntent. */
  public UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings() {
    return getIntentSettings;
  }

  /** Returns the object with the settings used for calls to createIntent. */
  public UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings() {
    return createIntentSettings;
  }

  /** Returns the object with the settings used for calls to updateIntent. */
  public UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings() {
    return updateIntentSettings;
  }

  /** Returns the object with the settings used for calls to deleteIntent. */
  public UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings() {
    return deleteIntentSettings;
  }

  /** Returns the object with the settings used for calls to importIntents. */
  public UnaryCallSettings<ImportIntentsRequest, Operation> importIntentsSettings() {
    return importIntentsSettings;
  }

  /** Returns the object with the settings used for calls to importIntents (long-running form). */
  public OperationCallSettings<ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
      importIntentsOperationSettings() {
    return importIntentsOperationSettings;
  }

  /** Returns the object with the settings used for calls to exportIntents. */
  public UnaryCallSettings<ExportIntentsRequest, Operation> exportIntentsSettings() {
    return exportIntentsSettings;
  }

  /** Returns the object with the settings used for calls to exportIntents (long-running form). */
  public OperationCallSettings<ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
      exportIntentsOperationSettings() {
    return exportIntentsOperationSettings;
  }

  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }

  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }

  // Instantiates the transport-specific stub (gRPC or HTTP/JSON) matching the
  // configured TransportChannelProvider; any other transport name is rejected.
  public IntentsStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcIntentsStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonIntentsStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "dialogflow";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "dialogflow.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dialogflow.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  // gRPC is the default transport.
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return IntentsStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Snapshots every per-method settings builder into its immutable form.
  protected IntentsStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    listIntentsSettings = settingsBuilder.listIntentsSettings().build();
    getIntentSettings = settingsBuilder.getIntentSettings().build();
    createIntentSettings = settingsBuilder.createIntentSettings().build();
    updateIntentSettings = settingsBuilder.updateIntentSettings().build();
    deleteIntentSettings = settingsBuilder.deleteIntentSettings().build();
    importIntentsSettings = settingsBuilder.importIntentsSettings().build();
    importIntentsOperationSettings = settingsBuilder.importIntentsOperationSettings().build();
    exportIntentsSettings = settingsBuilder.exportIntentsSettings().build();
    exportIntentsOperationSettings = settingsBuilder.exportIntentsOperationSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }

  /** Builder for IntentsStubSettings. */
  public static class Builder extends StubSettings.Builder<IntentsStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
        listIntentsSettings;
    private final UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings;
    private final UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings;
    private final UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings;
    private final UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings;
    private final UnaryCallSettings.Builder<ImportIntentsRequest, Operation> importIntentsSettings;
    private final OperationCallSettings.Builder<
            ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
        importIntentsOperationSettings;
    private final UnaryCallSettings.Builder<ExportIntentsRequest, Operation> exportIntentsSettings;
    private final OperationCallSettings.Builder<
            ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
        exportIntentsOperationSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      // Only UNAVAILABLE is retried under "retry_policy_0_codes".
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      // Shared retry profile: 100ms initial delay, x1.3 backoff, all delays and
      // timeouts capped at 60s.
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      listIntentsSettings = PagedCallSettings.newBuilder(LIST_INTENTS_PAGE_STR_FACT);
      getIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importIntentsOperationSettings = OperationCallSettings.newBuilder();
      exportIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      exportIntentsOperationSettings = OperationCallSettings.newBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      // Note: the LRO OperationCallSettings builders are intentionally absent
      // from this unary list; applyToAllUnaryMethods only touches unary calls.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listIntentsSettings,
              getIntentSettings,
              createIntentSettings,
              updateIntentSettings,
              deleteIntentSettings,
              importIntentsSettings,
              exportIntentsSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }

    protected Builder(IntentsStubSettings settings) {
      super(settings);

      listIntentsSettings = settings.listIntentsSettings.toBuilder();
      getIntentSettings = settings.getIntentSettings.toBuilder();
      createIntentSettings = settings.createIntentSettings.toBuilder();
      updateIntentSettings = settings.updateIntentSettings.toBuilder();
      deleteIntentSettings = settings.deleteIntentSettings.toBuilder();
      importIntentsSettings = settings.importIntentsSettings.toBuilder();
      importIntentsOperationSettings = settings.importIntentsOperationSettings.toBuilder();
      exportIntentsSettings = settings.exportIntentsSettings.toBuilder();
      exportIntentsOperationSettings = settings.exportIntentsOperationSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listIntentsSettings,
              getIntentSettings,
              createIntentSettings,
              updateIntentSettings,
              deleteIntentSettings,
              importIntentsSettings,
              exportIntentsSettings,
              listLocationsSettings,
              getLocationSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the shared retry codes/params to every method, plus LRO polling
    // settings (5s initial poll, x1.5 backoff, 45s cap, 300s total) for the
    // import/export operations.
    private static Builder initDefaults(Builder builder) {
      builder
          .listIntentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .updateIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deleteIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .importIntentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .exportIntentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .importIntentsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<ImportIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(ImportIntentsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(ImportIntentsMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      builder
          .exportIntentsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<ExportIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(ExportIntentsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(ExportIntentsMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to listIntents. */
    public PagedCallSettings.Builder<
            ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
        listIntentsSettings() {
      return listIntentsSettings;
    }

    /** Returns the builder for the settings used for calls to getIntent. */
    public UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings() {
      return getIntentSettings;
    }

    /** Returns the builder for the settings used for calls to createIntent. */
    public UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings() {
      return createIntentSettings;
    }

    /** Returns the builder for the settings used for calls to updateIntent. */
    public UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings() {
      return updateIntentSettings;
    }

    /** Returns the builder for the settings used for calls to deleteIntent. */
    public UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings() {
      return deleteIntentSettings;
    }

    /** Returns the builder for the settings used for calls to importIntents. */
    public UnaryCallSettings.Builder<ImportIntentsRequest, Operation> importIntentsSettings() {
      return importIntentsSettings;
    }

    /** Returns the builder for the settings used for calls to importIntents (long-running form). */
    public OperationCallSettings.Builder<
            ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
        importIntentsOperationSettings() {
      return importIntentsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to exportIntents. */
    public UnaryCallSettings.Builder<ExportIntentsRequest, Operation> exportIntentsSettings() {
      return exportIntentsSettings;
    }

    /** Returns the builder for the settings used for calls to exportIntents (long-running form). */
    public OperationCallSettings.Builder<
            ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
        exportIntentsOperationSettings() {
      return exportIntentsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    @Override
    public IntentsStubSettings build() throws IOException {
      return new IntentsStubSettings(this);
    }
  }
}
// ==== dataset residue: boundary between two concatenated source files ====
// repo: googleapis/google-cloud-java (size: 35,227 bytes)
// path: java-vision/proto-google-cloud-vision-v1p4beta1/src/main/java/com/google/cloud/vision/v1p4beta1/BatchAnnotateFilesRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/vision/v1p4beta1/image_annotator.proto
// Protobuf Java Version: 3.25.8
// NOTE(review): machine-generated protobuf message class; comments below are
// review annotations only, code is unchanged. This chunk is cut off mid-way
// through Builder.mergeFrom (the remainder follows beyond this chunk).

package com.google.cloud.vision.v1p4beta1;

/**
 * <pre>
 * A list of requests to annotate files using the BatchAnnotateFiles API.
 * </pre>
 *
 * Protobuf type {@code google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest}
 */
public final class BatchAnnotateFilesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest)
    BatchAnnotateFilesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BatchAnnotateFilesRequest.newBuilder() to construct.
  private BatchAnnotateFilesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BatchAnnotateFilesRequest() {
    requests_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchAnnotateFilesRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p4beta1_BatchAnnotateFilesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p4beta1_BatchAnnotateFilesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest.class,
            com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest.Builder.class);
  }

  public static final int REQUESTS_FIELD_NUMBER = 1;

  // Backing list for the repeated `requests` field (field number 1).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.vision.v1p4beta1.AnnotateFileRequest> requests_;

  /**
   * <pre>
   * Required. The list of file annotation requests. Right now we support only
   * one AnnotateFileRequest in BatchAnnotateFilesRequest.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.vision.v1p4beta1.AnnotateFileRequest> getRequestsList() {
    return requests_;
  }

  /**
   * <pre>
   * Required. The list of file annotation requests. Right now we support only
   * one AnnotateFileRequest in BatchAnnotateFilesRequest.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.vision.v1p4beta1.AnnotateFileRequestOrBuilder>
      getRequestsOrBuilderList() {
    return requests_;
  }

  /**
   * <pre>
   * Required. The list of file annotation requests. Right now we support only
   * one AnnotateFileRequest in BatchAnnotateFilesRequest.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public int getRequestsCount() {
    return requests_.size();
  }

  /**
   * <pre>
   * Required. The list of file annotation requests. Right now we support only
   * one AnnotateFileRequest in BatchAnnotateFilesRequest.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.vision.v1p4beta1.AnnotateFileRequest getRequests(int index) {
    return requests_.get(index);
  }

  /**
   * <pre>
   * Required. The list of file annotation requests. Right now we support only
   * one AnnotateFileRequest in BatchAnnotateFilesRequest.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.vision.v1p4beta1.AnnotateFileRequestOrBuilder getRequestsOrBuilder(
      int index) {
    return requests_.get(index);
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < requests_.size(); i++) {
      output.writeMessage(1, requests_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < requests_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, requests_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest other =
        (com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest) obj;

    if (!getRequestsList().equals(other.getRequestsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getRequestsCount() > 0) {
      hash = (37 * hash) + REQUESTS_FIELD_NUMBER;
      hash = (53 * hash) + getRequestsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * A list of requests to annotate files using the BatchAnnotateFiles API.
   * </pre>
   *
   * Protobuf type {@code google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest)
      com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p4beta1_BatchAnnotateFilesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p4beta1_BatchAnnotateFilesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest.class,
              com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest.Builder.class);
    }

    // Construct using com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (requestsBuilder_ == null) {
        requests_ = java.util.Collections.emptyList();
      } else {
        requests_ = null;
        requestsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p4beta1_BatchAnnotateFilesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest getDefaultInstanceForType() {
      return com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest build() {
      com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest buildPartial() {
      com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest result =
          new com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest result) {
      if (requestsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          requests_ = java.util.Collections.unmodifiableList(requests_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.requests_ = requests_;
      } else {
        result.requests_ = requestsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest) {
        return mergeFrom((com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest other) {
      if (other == com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest.getDefaultInstance())
        return this;
      if (requestsBuilder_ == null) {
        if (!other.requests_.isEmpty()) {
          if (requests_.isEmpty()) {
            requests_ = other.requests_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRequestsIsMutable();
            requests_.addAll(other.requests_);
          }
          onChanged();
        }
      } else {
        if (!other.requests_.isEmpty()) {
          if (requestsBuilder_.isEmpty()) {
            requestsBuilder_.dispose();
            requestsBuilder_ = null;
            requests_ = other.requests_;
            bitField0_ = (bitField0_ & ~0x00000001);
            requestsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRequestsFieldBuilder() : null; } else { requestsBuilder_.addAllMessages(other.requests_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.vision.v1p4beta1.AnnotateFileRequest m = input.readMessage( com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.parser(), extensionRegistry); if (requestsBuilder_ == null) { ensureRequestsIsMutable(); requests_.add(m); } else { requestsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.vision.v1p4beta1.AnnotateFileRequest> requests_ = java.util.Collections.emptyList(); private void ensureRequestsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { requests_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.AnnotateFileRequest>( requests_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.AnnotateFileRequest, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder, com.google.cloud.vision.v1p4beta1.AnnotateFileRequestOrBuilder> requestsBuilder_; /** * * * <pre> * Required. The list of file annotation requests. 
Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.AnnotateFileRequest> getRequestsList() { if (requestsBuilder_ == null) { return java.util.Collections.unmodifiableList(requests_); } else { return requestsBuilder_.getMessageList(); } } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public int getRequestsCount() { if (requestsBuilder_ == null) { return requests_.size(); } else { return requestsBuilder_.getCount(); } } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.AnnotateFileRequest getRequests(int index) { if (requestsBuilder_ == null) { return requests_.get(index); } else { return requestsBuilder_.getMessage(index); } } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. 
* </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRequests( int index, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest value) { if (requestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRequestsIsMutable(); requests_.set(index, value); onChanged(); } else { requestsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRequests( int index, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder builderForValue) { if (requestsBuilder_ == null) { ensureRequestsIsMutable(); requests_.set(index, builderForValue.build()); onChanged(); } else { requestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addRequests(com.google.cloud.vision.v1p4beta1.AnnotateFileRequest value) { if (requestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRequestsIsMutable(); requests_.add(value); onChanged(); } else { requestsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. 
* </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addRequests( int index, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest value) { if (requestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRequestsIsMutable(); requests_.add(index, value); onChanged(); } else { requestsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addRequests( com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder builderForValue) { if (requestsBuilder_ == null) { ensureRequestsIsMutable(); requests_.add(builderForValue.build()); onChanged(); } else { requestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addRequests( int index, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder builderForValue) { if (requestsBuilder_ == null) { ensureRequestsIsMutable(); requests_.add(index, builderForValue.build()); onChanged(); } else { requestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. 
* </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addAllRequests( java.lang.Iterable<? extends com.google.cloud.vision.v1p4beta1.AnnotateFileRequest> values) { if (requestsBuilder_ == null) { ensureRequestsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, requests_); onChanged(); } else { requestsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearRequests() { if (requestsBuilder_ == null) { requests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { requestsBuilder_.clear(); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder removeRequests(int index) { if (requestsBuilder_ == null) { ensureRequestsIsMutable(); requests_.remove(index); onChanged(); } else { requestsBuilder_.remove(index); } return this; } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder getRequestsBuilder( int index) { return getRequestsFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. 
The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.AnnotateFileRequestOrBuilder getRequestsOrBuilder( int index) { if (requestsBuilder_ == null) { return requests_.get(index); } else { return requestsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<? extends com.google.cloud.vision.v1p4beta1.AnnotateFileRequestOrBuilder> getRequestsOrBuilderList() { if (requestsBuilder_ != null) { return requestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(requests_); } } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder addRequestsBuilder() { return getRequestsFieldBuilder() .addBuilder(com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.getDefaultInstance()); } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. 
* </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder addRequestsBuilder( int index) { return getRequestsFieldBuilder() .addBuilder( index, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.getDefaultInstance()); } /** * * * <pre> * Required. The list of file annotation requests. Right now we support only * one AnnotateFileRequest in BatchAnnotateFilesRequest. * </pre> * * <code> * repeated .google.cloud.vision.v1p4beta1.AnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder> getRequestsBuilderList() { return getRequestsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.AnnotateFileRequest, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder, com.google.cloud.vision.v1p4beta1.AnnotateFileRequestOrBuilder> getRequestsFieldBuilder() { if (requestsBuilder_ == null) { requestsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.AnnotateFileRequest, com.google.cloud.vision.v1p4beta1.AnnotateFileRequest.Builder, com.google.cloud.vision.v1p4beta1.AnnotateFileRequestOrBuilder>( requests_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); requests_ = null; } return requestsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest) } // 
@@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest) private static final com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest(); } public static com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BatchAnnotateFilesRequest> PARSER = new com.google.protobuf.AbstractParser<BatchAnnotateFilesRequest>() { @java.lang.Override public BatchAnnotateFilesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchAnnotateFilesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchAnnotateFilesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,199
java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/ListSessionEntityTypesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2beta1/session_entity_type.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2beta1; /** * * * <pre> * The request message for * [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2beta1.SessionEntityTypes.ListSessionEntityTypes]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest} */ public final class ListSessionEntityTypesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest) ListSessionEntityTypesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListSessionEntityTypesRequest.newBuilder() to construct. 
private ListSessionEntityTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSessionEntityTypesRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSessionEntityTypesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_v2beta1_ListSessionEntityTypesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_v2beta1_ListSessionEntityTypesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest.class, com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The session to list all session entity types from. * Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The session to list all session entity types from. * Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. The maximum number of items to return in a single page. By * default 100 and at most 1000. 
* </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 
3, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest other = (com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2beta1.SessionEntityTypes.ListSessionEntityTypes]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest) com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_v2beta1_ListSessionEntityTypesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_v2beta1_ListSessionEntityTypesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest.class, com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest.Builder.class); } // Construct using // com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2beta1.SessionEntityTypeProto .internal_static_google_cloud_dialogflow_v2beta1_ListSessionEntityTypesRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest build() { com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest buildPartial() { com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest result = new com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest) { return mergeFrom((com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest other) { if (other == com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The session to list all session entity types from. 
* Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The session to list all session entity types from. * Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The session to list all session entity types from. * Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The session to list all session entity types from. 
* Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The session to list all session entity types from. * Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The maximum number of items to return in a single page. By * default 100 and at most 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The maximum number of items to return in a single page. By * default 100 and at most 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The maximum number of items to return in a single page. By * default 100 and at most 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest) private static final com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest(); } public static com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSessionEntityTypesRequest> PARSER = new com.google.protobuf.AbstractParser<ListSessionEntityTypesRequest>() { @java.lang.Override public ListSessionEntityTypesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListSessionEntityTypesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSessionEntityTypesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/fineract
35,558
fineract-provider/src/main/java/org/apache/fineract/interoperation/service/InteropServiceImpl.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.fineract.interoperation.service; import static org.apache.fineract.interoperation.util.InteropUtil.DEFAULT_LOCALE; import static org.apache.fineract.interoperation.util.InteropUtil.DEFAULT_ROUTING_CODE; import static org.apache.fineract.portfolio.paymentdetail.domain.PaymentDetail.instance; import static org.apache.fineract.portfolio.savings.SavingsAccountTransactionType.AMOUNT_HOLD; import static org.apache.fineract.portfolio.savings.SavingsAccountTransactionType.DEPOSIT; import static org.apache.fineract.portfolio.savings.SavingsAccountTransactionType.WITHDRAWAL; import static org.apache.fineract.portfolio.savings.domain.SavingsAccountTransaction.releaseAmount; import jakarta.persistence.PersistenceException; import java.math.BigDecimal; import java.sql.ResultSet; import java.sql.SQLException; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Locale; import java.util.function.Predicate; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import 
org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.fineract.commands.domain.CommandWrapper; import org.apache.fineract.commands.service.CommandWrapperBuilder; import org.apache.fineract.commands.service.PortfolioCommandSourceWritePlatformService; import org.apache.fineract.infrastructure.core.api.JsonCommand; import org.apache.fineract.infrastructure.core.data.CommandProcessingResult; import org.apache.fineract.infrastructure.core.exception.ErrorHandler; import org.apache.fineract.infrastructure.core.exception.PlatformDataIntegrityException; import org.apache.fineract.infrastructure.core.serialization.DefaultToApiJsonSerializer; import org.apache.fineract.infrastructure.core.service.DateUtils; import org.apache.fineract.infrastructure.core.service.ExternalIdFactory; import org.apache.fineract.infrastructure.core.service.MathUtil; import org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator; import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext; import org.apache.fineract.interoperation.data.InteropAccountData; import org.apache.fineract.interoperation.data.InteropIdentifierAccountResponseData; import org.apache.fineract.interoperation.data.InteropIdentifierRequestData; import org.apache.fineract.interoperation.data.InteropIdentifiersResponseData; import org.apache.fineract.interoperation.data.InteropKycData; import org.apache.fineract.interoperation.data.InteropKycResponseData; import org.apache.fineract.interoperation.data.InteropQuoteRequestData; import org.apache.fineract.interoperation.data.InteropQuoteResponseData; import org.apache.fineract.interoperation.data.InteropRequestData; import org.apache.fineract.interoperation.data.InteropTransactionData; import org.apache.fineract.interoperation.data.InteropTransactionRequestData; import org.apache.fineract.interoperation.data.InteropTransactionRequestResponseData; import org.apache.fineract.interoperation.data.InteropTransactionsData; 
import org.apache.fineract.interoperation.data.InteropTransferRequestData; import org.apache.fineract.interoperation.data.InteropTransferResponseData; import org.apache.fineract.interoperation.data.MoneyData; import org.apache.fineract.interoperation.domain.InteropActionState; import org.apache.fineract.interoperation.domain.InteropIdentifier; import org.apache.fineract.interoperation.domain.InteropIdentifierRepository; import org.apache.fineract.interoperation.domain.InteropIdentifierType; import org.apache.fineract.interoperation.exception.InteropAccountNotFoundException; import org.apache.fineract.interoperation.exception.InteropAccountTransactionNotAllowedException; import org.apache.fineract.interoperation.exception.InteropKycDataNotFoundException; import org.apache.fineract.interoperation.exception.InteropTransferAlreadyCommittedException; import org.apache.fineract.interoperation.exception.InteropTransferAlreadyOnHoldException; import org.apache.fineract.interoperation.exception.InteropTransferMissingException; import org.apache.fineract.interoperation.serialization.InteropDataValidator; import org.apache.fineract.organisation.monetary.domain.ApplicationCurrency; import org.apache.fineract.organisation.monetary.domain.ApplicationCurrencyRepository; import org.apache.fineract.organisation.monetary.domain.MonetaryCurrency; import org.apache.fineract.organisation.monetary.domain.Money; import org.apache.fineract.portfolio.account.exception.DifferentCurrenciesException; import org.apache.fineract.portfolio.loanaccount.data.LoanAccountData; import org.apache.fineract.portfolio.loanaccount.domain.Loan; import org.apache.fineract.portfolio.loanaccount.domain.LoanRepositoryWrapper; import org.apache.fineract.portfolio.loanaccount.exception.LoanNotFoundException; import org.apache.fineract.portfolio.note.domain.Note; import org.apache.fineract.portfolio.note.domain.NoteRepository; import org.apache.fineract.portfolio.paymentdetail.domain.PaymentDetail; import 
org.apache.fineract.portfolio.paymenttype.domain.PaymentType; import org.apache.fineract.portfolio.paymenttype.domain.PaymentTypeRepository; import org.apache.fineract.portfolio.savings.SavingsAccountTransactionType; import org.apache.fineract.portfolio.savings.SavingsTransactionBooleanValues; import org.apache.fineract.portfolio.savings.domain.SavingsAccount; import org.apache.fineract.portfolio.savings.domain.SavingsAccountRepository; import org.apache.fineract.portfolio.savings.domain.SavingsAccountTransaction; import org.apache.fineract.portfolio.savings.domain.SavingsAccountTransactionRepository; import org.apache.fineract.portfolio.savings.domain.SavingsAccountTransactionSummaryWrapper; import org.apache.fineract.portfolio.savings.domain.SavingsHelper; import org.apache.fineract.portfolio.savings.exception.InsufficientAccountBalanceException; import org.apache.fineract.portfolio.savings.exception.SavingsAccountNotFoundException; import org.apache.fineract.portfolio.savings.service.SavingsAccountDomainService; import org.apache.fineract.useradministration.domain.AppUser; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; import org.springframework.lang.NonNull; import org.springframework.orm.jpa.JpaSystemException; import org.springframework.transaction.annotation.Transactional; @Slf4j @RequiredArgsConstructor public class InteropServiceImpl implements InteropService { private final PlatformSecurityContext securityContext; private final InteropDataValidator dataValidator; private final SavingsAccountRepository savingsAccountRepository; private final SavingsAccountTransactionRepository savingsAccountTransactionRepository; private final ApplicationCurrencyRepository currencyRepository; private final NoteRepository noteRepository; private final PaymentTypeRepository paymentTypeRepository; 
    // Collaborators injected through the Lombok-generated constructor (@RequiredArgsConstructor on the class).
    private final InteropIdentifierRepository identifierRepository;
    private final LoanRepositoryWrapper loanRepositoryWrapper;
    private final SavingsHelper savingsHelper;
    private final SavingsAccountTransactionSummaryWrapper savingsAccountTransactionSummaryWrapper;
    private final SavingsAccountDomainService savingsAccountService;
    private final JdbcTemplate jdbcTemplate;
    private final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService;
    private final DefaultToApiJsonSerializer<LoanAccountData> toApiJsonSerializer;
    private final DatabaseSpecificSQLGenerator sqlGenerator;

    /**
     * Maps one row of the client/address/identifier join produced by {@link #schema()} onto an
     * {@link InteropKycData} value object. Static nested class: it holds no reference to the
     * enclosing service.
     */
    private static final class KycMapper implements RowMapper<InteropKycData> {

        // Used to escape the "description" column alias in a database-vendor-specific way.
        private final DatabaseSpecificSQLGenerator sqlGenerator;

        KycMapper(DatabaseSpecificSQLGenerator sqlGenerator) {
            this.sqlGenerator = sqlGenerator;
        }

        /**
         * Column list plus FROM/JOIN clause fragment of the KYC lookup query; the caller is
         * expected to prepend the SELECT keyword and append its own WHERE clause.
         */
        public String schema() {
            return " country.code_value as nationality, c.date_of_birth as dateOfBirth, c.mobile_no as contactPhone, gender.code_value as gender, c.email_address as email, "
                    + "kyc.code_value as idType, ci.document_key as idNo, ci." + sqlGenerator.escape("description") + " as description, "
                    + "country.code_value as country, a.`address_line_1`, a.`address_line_2`, "
                    + "a.city, state.code_value as stateProvince, a.postal_code as postalCode, c.firstname as firstName, c.middlename as middleName,"
                    + "c.lastname as lastName, c.display_name as displayName" + " from " + "m_client c "
                    + "left join m_client_address ca on c.id=ca.client_id " + "left join m_address a on a.id = ca.address_id "
                    + "inner join m_code_value gender on gender.id=c.gender_cv_id " + "left join m_code_value country on country.id=a.country_id "
                    + "left join m_code_value state on state.id = a.state_province_id " + "left join m_client_identifier ci on c.id=ci.client_id "
                    + "left join m_code_value kyc on kyc.id = ci.document_type_id ";
        }

        @Override
        public InteropKycData mapRow(final ResultSet rs, @SuppressWarnings("unused") final int rowNum) throws SQLException {
            // Read every aliased column of schema(); all values are carried as plain strings.
            final String nationality = rs.getString("nationality");
            final String dateOfBirth = rs.getString("dateOfBirth");
            final String contactPhone = rs.getString("contactPhone");
            final String gender = rs.getString("gender");
            final String email = rs.getString("email");
            final String idType = rs.getString("idType");
            final String idNo = rs.getString("idNo");
            final String description = rs.getString("description");
            final String country = rs.getString("country");
            final String addressLine1 = rs.getString("address_line_1");
            final String addressLine2 = rs.getString("address_line_2");
            final String city = rs.getString("city");
            final String stateProvince = rs.getString("stateProvince");
            final String postalCode = rs.getString("postalCode");
            final String firstName = rs.getString("firstName");
            final String middleName = rs.getString("middleName");
            final String lastName = rs.getString("lastName");
            final String displayName = rs.getString("displayName");
            return InteropKycData.instance(nationality, dateOfBirth, contactPhone, gender, email, idType, idNo, description, country,
                    addressLine1, addressLine2, city, stateProvince, postalCode, firstName, middleName, lastName, displayName);
        }
    }

    /**
     * Returns interoperation details of the savings account identified by its external id.
     *
     * @param accountId external id of the savings account
     * @throws RuntimeException presumably SavingsAccountNotFoundException when no account matches
     *         (raised inside validateAndGetSavingAccount, which is defined outside this view — TODO confirm)
     */
    @NonNull
    @Override
    @Transactional
    public InteropAccountData getAccountDetails(@NonNull String accountId) {
        return InteropAccountData.build(validateAndGetSavingAccount(accountId));
    }

    /**
     * Returns the account's transactions filtered by direction (debit/credit) and an optional
     * from/to window compared at day granularity, then appends any stored notes to each
     * transaction's note text, truncated to 500 characters.
     */
    @NonNull
    @Override
    @Transactional
    public InteropTransactionsData getAccountTransactions(@NonNull String accountId, boolean debit, boolean credit,
            java.time.LocalDateTime transactionsFrom, java.time.LocalDateTime transactionsTo) {
        SavingsAccount savingsAccount = validateAndGetSavingAccount(accountId);
        Predicate<SavingsAccountTransaction> transFilter = t -> {
            SavingsAccountTransactionType transactionType = t.getTransactionType();
            // Keep the transaction only when its direction matches one of the requested flags.
            if (debit != transactionType.isDebit() && credit != transactionType.isCredit()) {
                return false;
            }
            if (transactionsFrom == null && transactionsTo == null) {
                return true;
            }
            // Transactions carry a date only; compare the window against the start and end of that day.
            java.time.LocalDateTime transactionDate = t.getTransactionDate().atStartOfDay(ZoneId.systemDefault()).toLocalDateTime();
            return (transactionsTo == null || transactionsTo.compareTo(transactionDate) > 0) && (transactionsFrom == null
                    || transactionsFrom.compareTo(transactionDate.withHour(23).withMinute(59).withSecond(59)) <= 0);
        };
        InteropTransactionsData interopTransactionsData = InteropTransactionsData.build(savingsAccount, transFilter);
        // Enrich each returned transaction with its persisted notes, space-separated.
        for (InteropTransactionData interopTransactionData : interopTransactionsData.getTransactions()) {
            final List<Note> transactionNotes = noteRepository
                    .findBySavingsTransactionId(Long.valueOf(interopTransactionData.getTransactionId()));
            StringBuilder sb = new StringBuilder();
            for (final Note note : transactionNotes) {
                String s = note.getNote();
                if (s == null) {
                    continue;
                }
                sb.append(s + " ");
            }
            if (sb.toString().length() > 0) {
                String text = interopTransactionData.getNote() + " " + sb.toString();
                // Note text is capped at 500 characters (presumably a storage/column limit — TODO confirm).
                if (text.length() > 500) {
                    text = text.substring(0, 500);
                }
                interopTransactionData.updateNote(text);
            }
        }
        return interopTransactionsData;
    }

    @NonNull
    @Override
    @Transactional
    public
InteropIdentifiersResponseData getAccountIdentifiers(@NonNull String accountId) {
        // Collect all interop identifiers (aliases) registered for the savings account.
        SavingsAccount savingsAccount = validateAndGetSavingAccount(accountId);
        return InteropIdentifiersResponseData.build(savingsAccount);
    }

    /**
     * Resolves a savings account by one of its interop identifiers (aliases).
     *
     * @throws InteropAccountNotFoundException if no identifier matches the given type/value/sub-type
     */
    @NonNull
    @Transactional
    @Override
    public InteropIdentifierAccountResponseData getAccountByIdentifier(@NonNull InteropIdentifierType idType, @NonNull String idValue,
            String subIdOrType) {
        InteropIdentifier identifier = findIdentifier(idType, idValue, subIdOrType);
        if (identifier == null) {
            throw new InteropAccountNotFoundException(idType, idValue, subIdOrType);
        }
        return InteropIdentifierAccountResponseData.build(identifier.getId(), identifier.getAccount().getExternalId().getValue());
    }

    /**
     * Registers a new interop identifier (alias) for a savings account. Data-integrity failures
     * (e.g. a duplicate alias) are routed through handleInteropDataIntegrityIssues and an empty
     * response is returned instead of propagating the persistence exception.
     */
    @NonNull
    @Transactional
    @Override
    public InteropIdentifierAccountResponseData registerAccountIdentifier(@NonNull InteropIdentifierType idType, @NonNull String idValue,
            String subIdOrType, @NonNull JsonCommand command) {
        InteropIdentifierRequestData request = dataValidator.validateAndParseCreateIdentifier(idType, idValue, subIdOrType, command);
        // TODO: error handling
        SavingsAccount savingsAccount = validateAndGetSavingAccount(request.getAccountId());
        try {
            AppUser createdBy = securityContext.authenticatedUser();
            InteropIdentifier identifier = new InteropIdentifier(savingsAccount, request.getIdType(), request.getIdValue(),
                    request.getSubIdOrType(), createdBy.getUsername());
            identifierRepository.saveAndFlush(identifier);
            return InteropIdentifierAccountResponseData.build(identifier.getId(), savingsAccount.getExternalId().getValue());
        } catch (final JpaSystemException | DataIntegrityViolationException dve) {
            handleInteropDataIntegrityIssues(idType, request.getAccountId(), dve.getMostSpecificCause(), dve);
            return InteropIdentifierAccountResponseData.empty();
        } catch (final PersistenceException dve) {
            // JPA may wrap the real cause; unwrap it before translating the error.
            Throwable throwable = ExceptionUtils.getRootCause(dve.getCause());
            handleInteropDataIntegrityIssues(idType, request.getAccountId(), throwable, dve);
            return
InteropIdentifierAccountResponseData.empty();
        }
    }

    /**
     * Removes an interop identifier (alias) from its savings account.
     *
     * @return the deleted identifier's id together with the owning account's external id
     * @throws InteropAccountNotFoundException if the identifier does not exist
     */
    @NonNull
    @Transactional
    @Override
    public InteropIdentifierAccountResponseData deleteAccountIdentifier(@NonNull InteropIdentifierType idType, @NonNull String idValue,
            String subIdOrType) {
        InteropIdentifier identifier = findIdentifier(idType, idValue, subIdOrType);
        if (identifier == null) {
            throw new InteropAccountNotFoundException(idType, idValue, subIdOrType);
        }
        // Capture the response values before the entity is deleted.
        String accountId = identifier.getAccount().getExternalId().getValue();
        Long id = identifier.getId();
        identifierRepository.delete(identifier);
        return InteropIdentifierAccountResponseData.build(id, accountId);
    }

    /** Transaction-request lookup is not persisted, so the state is always REJECTED. */
    @Override
    public InteropTransactionRequestResponseData getTransactionRequest(@NonNull String transactionCode, @NonNull String requestCode) {
        // always REJECTED until request info is stored
        return InteropTransactionRequestResponseData.build(transactionCode, InteropActionState.REJECTED, requestCode);
    }

    /** Accepts a transaction request initiated by the payee; this instance always acts as payer. */
    @Override
    @NonNull
    @Transactional
    public InteropTransactionRequestResponseData createTransactionRequest(@NonNull JsonCommand command) {
        // only when Payee request transaction from Payer, so here role must be
        // always Payer
        InteropTransactionRequestData request = dataValidator.validateAndParseCreateRequest(command);
        // TODO: error handling
        validateAndGetSavingAccount(request);
        return InteropTransactionRequestResponseData.build(command.commandId(), request.getTransactionCode(), InteropActionState.ACCEPTED,
                request.getExpiration(), request.getExtensionList(), request.getRequestCode());
    }

    /** Quote lookup is not supported; always returns {@code null}. */
    @Override
    public InteropQuoteResponseData getQuote(@NonNull String transactionCode, @NonNull String quoteCode) {
        return null;
    }

    /**
     * Calculates the fee quote for an interop transaction; the fee and balance check depend on
     * whether this side of the transaction is debit (payer) or credit (payee).
     */
    @Override
    @NonNull
    @Transactional
    public InteropQuoteResponseData createQuote(@NonNull JsonCommand command) {
        InteropQuoteRequestData request = dataValidator.validateAndParseCreateQuote(command);
        SavingsAccount savingsAccount = validateAndGetSavingAccount(request);
        SavingsAccountTransactionType transactionType =
request.getTransactionRole().getTransactionType(); final BigDecimal fee; if (transactionType.isDebit()) { fee = savingsAccount.calculateWithdrawalFee(request.getAmount().getAmount()); if (MathUtil.isLessThan(savingsAccount.getWithdrawableBalance(), request.getAmount().getAmount().add(fee))) { throw new InsufficientAccountBalanceException(savingsAccount.getExternalId().getValue(), savingsAccount.getWithdrawableBalance(), fee, request.getAmount().getAmount()); } } else { fee = BigDecimal.ZERO; } return InteropQuoteResponseData.build(command.commandId(), request.getTransactionCode(), InteropActionState.ACCEPTED, request.getExpiration(), request.getExtensionList(), request.getQuoteCode(), MoneyData.build(fee, savingsAccount.getCurrency().getCode()), null); } @Override public InteropTransferResponseData getTransfer(@NonNull String transactionCode, @NonNull String transferCode) { return null; } @Override @NonNull @Transactional public InteropTransferResponseData prepareTransfer(@NonNull JsonCommand command) { InteropTransferRequestData request = dataValidator.validateAndParseTransferRequest(command); String transferCode = request.getTransferCode(); LocalDate transactionDate = DateUtils.getBusinessLocalDate(); // TODO validate request fee/comission and account quote amount // matching, at CREATE it is debited anyway SavingsAccountTransactionType transactionType = request.getTransactionRole().getTransactionType(); if (transactionType.isDebit()) { SavingsAccount savingsAccount = validateAndGetSavingAccount(request); BigDecimal total = calculateTotalTransferAmount(request, savingsAccount); if (MathUtil.isLessThan(savingsAccount.getWithdrawableBalance(), total)) { throw new InsufficientAccountBalanceException(savingsAccount.getExternalId().getValue(), savingsAccount.getWithdrawableBalance(), null, total); } if (findTransaction(savingsAccount, transferCode, AMOUNT_HOLD.getValue()) != null) { throw new 
InteropTransferAlreadyOnHoldException(savingsAccount.getExternalId().getValue(), transferCode); } PaymentDetail paymentDetail = instance(findPaymentType(), savingsAccount.getExternalId().getValue(), null, getRoutingCode(), transferCode, null); SavingsAccountTransaction holdTransaction = SavingsAccountTransaction.holdAmount(savingsAccount, savingsAccount.office(), paymentDetail, transactionDate, Money.of(savingsAccount.getCurrency(), total), false); MonetaryCurrency accountCurrency = savingsAccount.getCurrency().copy(); holdTransaction.setRunningBalance( Money.of(accountCurrency, savingsAccount.getWithdrawableBalance().subtract(holdTransaction.getAmount()))); holdTransaction.updateCumulativeBalanceAndDates(accountCurrency, transactionDate); savingsAccount.holdAmount(total); savingsAccount.addTransaction(holdTransaction); savingsAccountRepository.save(savingsAccount); } return InteropTransferResponseData.build(command.commandId(), request.getTransactionCode(), InteropActionState.ACCEPTED, request.getExpiration(), request.getExtensionList(), transferCode, DateUtils.getLocalDateTimeOfTenant()); } @Override @NonNull @Transactional public InteropTransferResponseData commitTransfer(@NonNull JsonCommand command) { InteropTransferRequestData request = dataValidator.validateAndParseTransferRequest(command); boolean isDebit = request.getTransactionRole().getTransactionType().isDebit(); SavingsAccount savingsAccount = validateAndGetSavingAccount(request); String transferCode = request.getTransferCode(); if (findTransaction(savingsAccount, transferCode, (isDebit ? 
WITHDRAWAL : DEPOSIT).getValue()) != null) { throw new InteropTransferAlreadyCommittedException(savingsAccount.getExternalId().getValue(), transferCode); } LocalDateTime transactionDateTime = DateUtils.getLocalDateTimeOfTenant(); LocalDate transactionDate = DateUtils.getBusinessLocalDate(); DateTimeFormatter fmt = getDateTimeFormatter(command); SavingsAccountTransaction transaction; final boolean backdatedTxnsAllowedTill = false; if (isDebit) { SavingsAccountTransaction holdTransaction = findTransaction(savingsAccount, transferCode, AMOUNT_HOLD.getValue()); if (holdTransaction == null) { throw new InteropTransferMissingException(savingsAccount.getExternalId().getValue(), transferCode); } BigDecimal totalTransferAmount = calculateTotalTransferAmount(request, savingsAccount); if (holdTransaction.getAmount().compareTo(totalTransferAmount) != 0) { throw new InteropTransferMissingException(savingsAccount.getExternalId().getValue(), transferCode); } if (MathUtil.isLessThan(savingsAccount.getWithdrawableBalance().add(holdTransaction.getAmount()), totalTransferAmount)) { throw new InsufficientAccountBalanceException(savingsAccount.getExternalId().getValue(), savingsAccount.getWithdrawableBalance(), null, totalTransferAmount); } if (holdTransaction.getReleaseIdOfHoldAmountTransaction() == null) { SavingsAccountTransaction releaseTransaction = savingsAccountTransactionRepository .saveAndFlush(releaseAmount(holdTransaction, transactionDate)); holdTransaction.updateReleaseId(releaseTransaction.getId()); savingsAccount.releaseOnHoldAmount(holdTransaction.getAmount()); savingsAccount.addTransaction(releaseTransaction); savingsAccountRepository.save(savingsAccount); } SavingsTransactionBooleanValues transactionValues = new SavingsTransactionBooleanValues(false, true, true, false, false); transaction = savingsAccountService.handleWithdrawal(savingsAccount, fmt, transactionDate, request.getAmount().getAmount(), instance(findPaymentType(), savingsAccount.getExternalId().getValue(), 
null, getRoutingCode(), transferCode, null), transactionValues, backdatedTxnsAllowedTill); } else { transaction = savingsAccountService.handleDeposit(savingsAccount, fmt, transactionDate, request.getAmount().getAmount(), instance(findPaymentType(), savingsAccount.getExternalId().getValue(), null, getRoutingCode(), transferCode, null), false, true, backdatedTxnsAllowedTill); } String note = request.getNote(); if (!StringUtils.isBlank(note)) { noteRepository.save(Note.savingsTransactionNote(savingsAccount, transaction, note)); } return InteropTransferResponseData.build(command.commandId(), request.getTransactionCode(), InteropActionState.ACCEPTED, request.getExpiration(), request.getExtensionList(), request.getTransferCode(), transactionDateTime); } @Override @Transactional public @NonNull InteropTransferResponseData releaseTransfer(@NonNull JsonCommand command) { InteropTransferRequestData request = dataValidator.validateAndParseTransferRequest(command); SavingsAccount savingsAccount = validateAndGetSavingAccount(request); LocalDateTime transactionDateTime = DateUtils.getLocalDateTimeOfTenant(); LocalDate transactionDate = DateUtils.getBusinessLocalDate(); SavingsAccountTransaction holdTransaction = findTransaction(savingsAccount, request.getTransferCode(), AMOUNT_HOLD.getValue()); if (holdTransaction != null && holdTransaction.getReleaseIdOfHoldAmountTransaction() == null) { SavingsAccountTransaction releaseTransaction = releaseAmount(holdTransaction, transactionDate); MonetaryCurrency accountCurrency = savingsAccount.getCurrency().copy(); releaseTransaction .setRunningBalance(Money.of(accountCurrency, savingsAccount.getWithdrawableBalance().add(holdTransaction.getAmount()))); releaseTransaction.updateCumulativeBalanceAndDates(accountCurrency, transactionDate); releaseTransaction = savingsAccountTransactionRepository.saveAndFlush(releaseTransaction); holdTransaction.updateReleaseId(releaseTransaction.getId()); 
savingsAccount.releaseOnHoldAmount(holdTransaction.getAmount()); savingsAccount.addTransaction(releaseTransaction); savingsAccountRepository.save(savingsAccount); } else { throw new InteropTransferMissingException(savingsAccount.getExternalId().getValue(), request.getTransferCode()); } return InteropTransferResponseData.build(command.commandId(), request.getTransactionCode(), InteropActionState.ACCEPTED, request.getExpiration(), request.getExtensionList(), request.getTransferCode(), transactionDateTime); } @Override public @NonNull InteropKycResponseData getKyc(@NonNull String accountId) { SavingsAccount savingsAccount = validateAndGetSavingAccount(accountId); Long clientId = savingsAccount.getClient().getId(); try { final InteropServiceImpl.KycMapper rm = new InteropServiceImpl.KycMapper(sqlGenerator); final String sql = "select " + rm.schema() + " where c.id = ?"; final InteropKycData accountKyc = this.jdbcTemplate.queryForObject(sql, rm, new Object[] { clientId }); // NOSONAR return InteropKycResponseData.build(accountKyc); } catch (final EmptyResultDataAccessException e) { throw new InteropKycDataNotFoundException(clientId, e); } } @Override public @NonNull String disburseLoan(@NonNull String accountId, String apiRequestBodyAsJson) { Loan loan = validateAndGetLoan(accountId); Long loanId = loan.getId(); final CommandWrapperBuilder builder = new CommandWrapperBuilder().withJson(apiRequestBodyAsJson); final CommandWrapper commandRequest = builder.disburseLoanApplication(loanId).build(); CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest); return this.toApiJsonSerializer.serialize(result); } @Override public @NonNull String loanRepayment(@NonNull String accountId, String apiRequestBodyAsJson) { Loan loan = validateAndGetLoan(accountId); Long loanId = loan.getId(); final CommandWrapperBuilder builder = new CommandWrapperBuilder().withJson(apiRequestBodyAsJson); final CommandWrapper commandRequest = 
builder.loanRepaymentTransaction(loanId).build(); CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest); return this.toApiJsonSerializer.serialize(result); } private SavingsAccount validateAndGetSavingAccount(String accountId) { SavingsAccount savingsAccount = savingsAccountRepository.findByExternalId(ExternalIdFactory.produce(accountId)); if (savingsAccount == null) { throw new SavingsAccountNotFoundException(accountId); } return savingsAccount; } private Loan validateAndGetLoan(String accountId) { Loan loan = loanRepositoryWrapper.findNonClosedLoanByAccountNumber(accountId); if (loan == null) { throw new LoanNotFoundException(accountId); } return loan; } private SavingsAccount validateAndGetSavingAccount(@NonNull InteropRequestData request) { // TODO: error handling SavingsAccount savingsAccount = validateAndGetSavingAccount(request.getAccountId()); savingsAccount.setHelpers(savingsAccountTransactionSummaryWrapper, savingsHelper); ApplicationCurrency requestCurrency = currencyRepository.findOneByCode(request.getAmount().getCurrency()); if (!savingsAccount.getCurrency().getCode().equals(requestCurrency.getCode())) { throw new DifferentCurrenciesException(savingsAccount.getCurrency().getCode(), requestCurrency.getCode()); } SavingsAccountTransactionType transactionType = request.getTransactionRole().getTransactionType(); if (!savingsAccount.isTransactionAllowed(transactionType, request.getExpirationLocalDate())) { throw new InteropAccountTransactionNotAllowedException(request.getAccountId()); } request.normalizeAmounts(savingsAccount.getCurrency()); return savingsAccount; } private BigDecimal calculateTotalTransferAmount(@NonNull InteropTransferRequestData request, @NonNull SavingsAccount savingsAccount) { BigDecimal total = request.getAmount().getAmount(); MoneyData requestFee = request.getFspFee(); if (requestFee != null) { if (!savingsAccount.getCurrency().getCode().equals(requestFee.getCurrency())) { throw new 
DifferentCurrenciesException(savingsAccount.getCurrency().getCode(), requestFee.getCurrency()); } // TODO: compare with calculated quote fee total = MathUtil.add(total, requestFee.getAmount()); } MoneyData requestCommission = request.getFspCommission(); if (requestCommission != null) { if (!savingsAccount.getCurrency().getCode().equals(requestCommission.getCurrency())) { throw new DifferentCurrenciesException(savingsAccount.getCurrency().getCode(), requestCommission.getCurrency()); } // TODO: compare with calculated quote commission total = MathUtil.subtractToZero(total, requestCommission.getAmount()); } return total; } private DateTimeFormatter getDateTimeFormatter(@NonNull JsonCommand command) { Locale locale = command.extractLocale(); if (locale == null) { locale = DEFAULT_LOCALE; } String dateFormat = command.dateFormat(); if (StringUtils.isEmpty(dateFormat)) { dateFormat = "yyyy-MM-dd HH:mm:ss.SSS"; } return DateTimeFormatter.ofPattern(dateFormat).withLocale(locale); } PaymentType findPaymentType() { List<PaymentType> paymentTypes = paymentTypeRepository.findAll(); for (PaymentType paymentType : paymentTypes) { if (!paymentType.getIsCashPayment()) { return paymentType; } // TODO: for now first not cash is retured: // 1. must be added as initial setup, // 2. 
if more than one non-cashe type added then update this code } return null; } private SavingsAccountTransaction findTransaction(SavingsAccount savingsAccount, String transactionCode, Integer transactionTypeValue) { return savingsAccount.getTransactions().stream().filter(t -> transactionTypeValue.equals(t.getTypeOf())).filter(t -> { PaymentDetail detail = t.getPaymentDetail(); return detail != null && getRoutingCode().equals(detail.getRoutingCode()) && transactionCode.equals(detail.getReceiptNumber()); }).findFirst().orElse(null); } public InteropIdentifier findIdentifier(@NonNull InteropIdentifierType idType, @NonNull String idValue, String subIdOrType) { return identifierRepository.findOneByTypeAndValueAndSubType(idType, idValue, subIdOrType); } /* * Guaranteed to throw an exception no matter what the data integrity issue is. */ private void handleInteropDataIntegrityIssues(InteropIdentifierType idType, String accountId, final Throwable realCause, final Exception dve) { if (realCause.getMessage().contains("uk_interop_identifier_account")) { throw new PlatformDataIntegrityException("error.msg.interop.duplicate.account.identifier", "Account identifier of type `" + idType.name() + "' already exists for account with externalId `" + accountId + "`", "idType", idType.name(), accountId); } log.error("Error occured.", dve); throw ErrorHandler.getMappable(dve, "error.msg.interop.unknown.data.integrity.issue", "Unknown data integrity issue with resource: " + realCause.getMessage()); } @NonNull String getRoutingCode() { return DEFAULT_ROUTING_CODE; } }
apache/flink
35,479
flink-runtime/src/test/java/org/apache/flink/runtime/dispatcher/runner/SessionDispatcherLeaderProcessTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.dispatcher.runner; import org.apache.flink.api.common.JobID; import org.apache.flink.core.testutils.FlinkAssertions; import org.apache.flink.core.testutils.OneShotLatch; import org.apache.flink.runtime.client.DuplicateJobSubmissionException; import org.apache.flink.runtime.client.JobSubmissionException; import org.apache.flink.runtime.highavailability.JobResultStore; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobGraphTestUtils; import org.apache.flink.runtime.jobmanager.ExecutionPlanStore; import org.apache.flink.runtime.jobmaster.JobResult; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.testutils.TestingExecutionPlanStore; import org.apache.flink.runtime.testutils.TestingJobResultStore; import org.apache.flink.runtime.util.TestingFatalErrorHandler; import org.apache.flink.runtime.webmonitor.TestingDispatcherGateway; import org.apache.flink.streaming.api.graph.ExecutionPlan; import org.apache.flink.util.ExecutorUtils; import org.apache.flink.util.FlinkException; import org.apache.flink.util.TestLoggerExtension; import org.apache.flink.util.concurrent.FutureUtils; import 
org.apache.flink.util.function.ThrowingConsumer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import static org.apache.flink.core.testutils.FlinkAssertions.STREAM_THROWABLE; import static org.apache.flink.core.testutils.FlinkAssertions.anyCauseMatches; import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for the {@link SessionDispatcherLeaderProcess}. 
*/ @ExtendWith(TestLoggerExtension.class) class SessionDispatcherLeaderProcessTest { private static final JobGraph JOB_GRAPH = JobGraphTestUtils.emptyJobGraph(); private static ExecutorService ioExecutor; private final UUID leaderSessionId = UUID.randomUUID(); private TestingFatalErrorHandler fatalErrorHandler; private ExecutionPlanStore executionPlanStore; private JobResultStore jobResultStore; private AbstractDispatcherLeaderProcess.DispatcherGatewayServiceFactory dispatcherServiceFactory; @BeforeAll static void setupClass() { ioExecutor = Executors.newSingleThreadExecutor(); } @BeforeEach void setup() { fatalErrorHandler = new TestingFatalErrorHandler(); executionPlanStore = TestingExecutionPlanStore.newBuilder().build(); jobResultStore = TestingJobResultStore.builder().build(); dispatcherServiceFactory = createFactoryBasedOnGenericSupplier( () -> TestingDispatcherGatewayService.newBuilder().build()); } @AfterEach void teardown() throws Exception { if (fatalErrorHandler != null) { fatalErrorHandler.rethrowError(); fatalErrorHandler = null; } } @AfterAll static void teardownClass() { if (ioExecutor != null) { ExecutorUtils.gracefulShutdown(5L, TimeUnit.SECONDS, ioExecutor); } } @Test void start_afterClose_doesNotHaveAnEffect() throws Exception { final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess(); dispatcherLeaderProcess.close(); dispatcherLeaderProcess.start(); assertThat(dispatcherLeaderProcess.getState()) .isEqualTo(SessionDispatcherLeaderProcess.State.STOPPED); } @Test void testStartTriggeringDispatcherServiceCreation() throws Exception { dispatcherServiceFactory = createFactoryBasedOnGenericSupplier( () -> TestingDispatcherGatewayService.newBuilder().build()); try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess()) { dispatcherLeaderProcess.start(); assertThat(dispatcherLeaderProcess.getState()) .isEqualTo(SessionDispatcherLeaderProcess.State.RUNNING); } } @Test void 
testRecoveryWithExecutionPlanButNoDirtyJobResult() throws Exception { testJobRecovery( Collections.singleton(JOB_GRAPH), Collections.emptySet(), actualRecoveredExecutionPlans -> assertThat(actualRecoveredExecutionPlans) .singleElement() .isEqualTo(JOB_GRAPH), actualRecoveredDirtyJobResults -> assertThat(actualRecoveredDirtyJobResults).isEmpty()); } @Test void testRecoveryWithExecutionPlanAndMatchingDirtyJobResult() throws Exception { final JobResult matchingDirtyJobResult = TestingJobResultStore.createSuccessfulJobResult(JOB_GRAPH.getJobID()); testJobRecovery( Collections.singleton(JOB_GRAPH), Collections.singleton(matchingDirtyJobResult), actualRecoveredExecutionPlans -> assertThat(actualRecoveredExecutionPlans).isEmpty(), actualRecoveredDirtyJobResults -> assertThat(actualRecoveredDirtyJobResults) .singleElement() .isEqualTo(matchingDirtyJobResult)); } @Test void testRecoveryWithMultipleExecutionPlansAndOneMatchingDirtyJobResult() throws Exception { final JobResult matchingDirtyJobResult = TestingJobResultStore.createSuccessfulJobResult(JOB_GRAPH.getJobID()); final ExecutionPlan otherExecutionPlan = JobGraphTestUtils.emptyJobGraph(); testJobRecovery( Arrays.asList(otherExecutionPlan, JOB_GRAPH), Collections.singleton(matchingDirtyJobResult), actualRecoveredExecutionPlans -> assertThat(actualRecoveredExecutionPlans) .singleElement() .isEqualTo(otherExecutionPlan), actualRecoveredDirtyJobResults -> assertThat(actualRecoveredDirtyJobResults) .singleElement() .isEqualTo(matchingDirtyJobResult)); } @Test void testRecoveryWithoutExecutionPlanButDirtyJobResult() throws Exception { final JobResult dirtyJobResult = TestingJobResultStore.createSuccessfulJobResult(new JobID()); testJobRecovery( Collections.emptyList(), Collections.singleton(dirtyJobResult), actualRecoveredExecutionPlans -> assertThat(actualRecoveredExecutionPlans).isEmpty(), actualRecoveredDirtyJobResults -> assertThat(actualRecoveredDirtyJobResults) .singleElement() .isEqualTo(dirtyJobResult)); } private 
void testJobRecovery( Collection<ExecutionPlan> executionPlansToRecover, Set<JobResult> dirtyJobResults, Consumer<Collection<ExecutionPlan>> recoveredExecutionPlanAssertion, Consumer<Collection<JobResult>> recoveredDirtyJobResultAssertion) throws Exception { executionPlanStore = TestingExecutionPlanStore.newBuilder() .setInitialExecutionPlans(executionPlansToRecover) .build(); jobResultStore = TestingJobResultStore.builder() .withGetDirtyResultsSupplier(() -> dirtyJobResults) .build(); final CompletableFuture<Collection<ExecutionPlan>> recoveredExecutionPlansFuture = new CompletableFuture<>(); final CompletableFuture<Collection<JobResult>> recoveredDirtyJobResultsFuture = new CompletableFuture<>(); dispatcherServiceFactory = (ignoredDispatcherId, recoveredJobs, recoveredDirtyJobResults, ignoredExecutionPlanWriter, ignoredJobResultStore) -> { recoveredExecutionPlansFuture.complete(recoveredJobs); recoveredDirtyJobResultsFuture.complete(recoveredDirtyJobResults); return TestingDispatcherGatewayService.newBuilder().build(); }; try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess()) { dispatcherLeaderProcess.start(); recoveredExecutionPlanAssertion.accept(recoveredExecutionPlansFuture.get()); recoveredDirtyJobResultAssertion.accept(recoveredDirtyJobResultsFuture.get()); } } @Test void testRecoveryWhileExecutionPlanRecoveryIsScheduledConcurrently() throws Exception { final JobResult dirtyJobResult = TestingJobResultStore.createSuccessfulJobResult(new JobID()); OneShotLatch recoveryInitiatedLatch = new OneShotLatch(); OneShotLatch jobGraphAddedLatch = new OneShotLatch(); executionPlanStore = TestingExecutionPlanStore.newBuilder() // mimic behavior when recovering a ExecutionPlan that is marked for // deletion .setRecoverExecutionPlanFunction((jobId, jobs) -> null) .build(); jobResultStore = TestingJobResultStore.builder() .withGetDirtyResultsSupplier( () -> { recoveryInitiatedLatch.trigger(); try { jobGraphAddedLatch.await(); 
} catch (InterruptedException e) { Thread.currentThread().interrupt(); } return Collections.singleton(dirtyJobResult); }) .build(); final CompletableFuture<Collection<ExecutionPlan>> recoveredExecutionPlansFuture = new CompletableFuture<>(); final CompletableFuture<Collection<JobResult>> recoveredDirtyJobResultsFuture = new CompletableFuture<>(); dispatcherServiceFactory = (ignoredDispatcherId, recoveredJobs, recoveredDirtyJobResults, ignoredExecutionPlanWriter, ignoredJobResultStore) -> { recoveredExecutionPlansFuture.complete(recoveredJobs); recoveredDirtyJobResultsFuture.complete(recoveredDirtyJobResults); return TestingDispatcherGatewayService.newBuilder().build(); }; try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess()) { dispatcherLeaderProcess.start(); // start returns without the initial recovery being completed // mimic ZK message about an added jobgraph while the recovery is ongoing recoveryInitiatedLatch.await(); dispatcherLeaderProcess.onAddedExecutionPlan(dirtyJobResult.getJobId()); jobGraphAddedLatch.trigger(); assertThatFuture(recoveredExecutionPlansFuture) .eventuallySucceeds() .satisfies( recoverExecutionPlans -> assertThat(recoverExecutionPlans).isEmpty()); assertThatFuture(recoveredDirtyJobResultsFuture) .eventuallySucceeds() .satisfies( recoveredDirtyJobResults -> assertThat(recoveredDirtyJobResults) .containsExactly(dirtyJobResult)); } } @Test void closeAsync_stopsExecutionPlanStoreAndDispatcher() throws Exception { final CompletableFuture<Void> jobGraphStopFuture = new CompletableFuture<>(); executionPlanStore = TestingExecutionPlanStore.newBuilder() .setStopRunnable(() -> jobGraphStopFuture.complete(null)) .build(); final CompletableFuture<Void> dispatcherServiceTerminationFuture = new CompletableFuture<>(); dispatcherServiceFactory = createFactoryBasedOnGenericSupplier( () -> TestingDispatcherGatewayService.newBuilder() .setTerminationFuture(dispatcherServiceTerminationFuture) 
.withManualTerminationFutureCompletion() .build()); try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess()) { dispatcherLeaderProcess.start(); // wait for the creation of the DispatcherGatewayService dispatcherLeaderProcess.getDispatcherGateway().get(); final CompletableFuture<Void> terminationFuture = dispatcherLeaderProcess.closeAsync(); assertThat(jobGraphStopFuture).isNotDone(); assertThat(terminationFuture).isNotDone(); dispatcherServiceTerminationFuture.complete(null); // verify that we shut down the ExecutionPlanStore jobGraphStopFuture.get(); // verify that we completed the dispatcher leader process shut down terminationFuture.get(); } } @Test void unexpectedDispatcherServiceTerminationWhileRunning_callsFatalErrorHandler() { final CompletableFuture<Void> terminationFuture = new CompletableFuture<>(); dispatcherServiceFactory = createFactoryBasedOnGenericSupplier( () -> TestingDispatcherGatewayService.newBuilder() .setTerminationFuture(terminationFuture) .build()); final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess(); dispatcherLeaderProcess.start(); final FlinkException expectedFailure = new FlinkException("Expected test failure."); terminationFuture.completeExceptionally(expectedFailure); final Throwable error = fatalErrorHandler.getErrorFuture().join(); assertThat(error).rootCause().isEqualTo(expectedFailure); fatalErrorHandler.clearError(); } @Test void unexpectedDispatcherServiceTerminationWhileNotRunning_doesNotCallFatalErrorHandler() { final CompletableFuture<Void> terminationFuture = new CompletableFuture<>(); dispatcherServiceFactory = createFactoryBasedOnGenericSupplier( () -> TestingDispatcherGatewayService.newBuilder() .setTerminationFuture(terminationFuture) .withManualTerminationFutureCompletion() .build()); final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess(); dispatcherLeaderProcess.start(); 
dispatcherLeaderProcess.closeAsync(); final FlinkException expectedFailure = new FlinkException("Expected test failure."); terminationFuture.completeExceptionally(expectedFailure); assertThatThrownBy(() -> fatalErrorHandler.getErrorFuture().get(10, TimeUnit.MILLISECONDS)) .isInstanceOf(TimeoutException.class); } @Test void confirmLeaderSessionFuture_completesAfterDispatcherServiceHasBeenStarted() throws Exception { final OneShotLatch createDispatcherServiceLatch = new OneShotLatch(); final String dispatcherAddress = "myAddress"; final TestingDispatcherGateway dispatcherGateway = TestingDispatcherGateway.newBuilder().setAddress(dispatcherAddress).build(); dispatcherServiceFactory = createFactoryBasedOnGenericSupplier( () -> { try { createDispatcherServiceLatch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } return TestingDispatcherGatewayService.newBuilder() .setDispatcherGateway(dispatcherGateway) .build(); }); try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess = createDispatcherLeaderProcess()) { final CompletableFuture<String> confirmLeaderSessionFuture = dispatcherLeaderProcess.getLeaderAddressFuture(); dispatcherLeaderProcess.start(); assertThat(confirmLeaderSessionFuture).isNotDone(); createDispatcherServiceLatch.trigger(); assertThatFuture(confirmLeaderSessionFuture) .eventuallySucceeds() .isEqualTo(dispatcherAddress); } } @Test void closeAsync_duringJobRecovery_preventsDispatcherServiceCreation() throws Exception { final OneShotLatch jobRecoveryStartedLatch = new OneShotLatch(); final OneShotLatch completeJobRecoveryLatch = new OneShotLatch(); final OneShotLatch createDispatcherServiceLatch = new OneShotLatch(); this.executionPlanStore = TestingExecutionPlanStore.newBuilder() .setJobIdsFunction( storedJobs -> { jobRecoveryStartedLatch.trigger(); completeJobRecoveryLatch.await(); return storedJobs; }) .build(); this.dispatcherServiceFactory = createFactoryBasedOnGenericSupplier( () -> { 
// NOTE(review): this region starts mid-test — the statements immediately below are the tail
// of a test method whose opening lines are above the visible range. All code tokens in this
// block are unchanged; only comments/javadoc were added.
                            createDispatcherServiceLatch.trigger();
                            return TestingDispatcherGatewayService.newBuilder().build();
                        });

        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            jobRecoveryStartedLatch.await();

            dispatcherLeaderProcess.closeAsync();

            completeJobRecoveryLatch.trigger();

            // the factory latch must never trigger: the process was closed while job recovery
            // was still in flight, so no dispatcher service may be created afterwards
            assertThatThrownBy(
                            () -> createDispatcherServiceLatch.await(10L, TimeUnit.MILLISECONDS),
                            "No dispatcher service should be created after the process has been stopped.")
                    .isInstanceOf(TimeoutException.class);
        }
    }

    /**
     * Removing an execution plan from the {@code ExecutionPlanStore} and signalling {@code
     * onRemovedExecutionPlan} must ask the dispatcher service to terminate the corresponding
     * running job.
     */
    @Test
    void onRemovedExecutionPlan_terminatesRunningJob() throws Exception {
        executionPlanStore =
                TestingExecutionPlanStore.newBuilder()
                        .setInitialExecutionPlans(Collections.singleton(JOB_GRAPH))
                        .build();

        // completes with the JobID the dispatcher service was asked to terminate
        final CompletableFuture<JobID> terminateJobFuture = new CompletableFuture<>();
        final TestingDispatcherGatewayService testingDispatcherService =
                TestingDispatcherGatewayService.newBuilder()
                        .setOnRemovedJobGraphFunction(
                                jobID -> {
                                    terminateJobFuture.complete(jobID);
                                    return FutureUtils.completedVoidFuture();
                                })
                        .build();

        dispatcherServiceFactory =
                createFactoryBasedOnGenericSupplier(() -> testingDispatcherService);

        final ExecutorService executorService = Executors.newSingleThreadExecutor();
        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            // wait for the dispatcher process to be created
            dispatcherLeaderProcess.getDispatcherGateway().get();

            // now remove the Job from the ExecutionPlanStore and notify the dispatcher service
            executionPlanStore.globalCleanupAsync(JOB_GRAPH.getJobID(), executorService).join();
            dispatcherLeaderProcess.onRemovedExecutionPlan(JOB_GRAPH.getJobID());

            assertThat(terminateJobFuture.get()).isEqualTo(JOB_GRAPH.getJobID());
        } finally {
            // the cleanup executor must be idle at this point — no leftover queued tasks
            assertThat(executorService.shutdownNow()).isEmpty();
        }
    }

    /**
     * A failing job-removal call in the dispatcher service must be escalated to the fatal error
     * handler.
     */
    @Test
    void onRemovedExecutionPlan_failingRemovalCall_failsFatally() throws Exception {
        final FlinkException testException = new FlinkException("Test exception");

        final TestingDispatcherGatewayService testingDispatcherService =
                TestingDispatcherGatewayService.newBuilder()
                        .setOnRemovedJobGraphFunction(
                                jobID -> FutureUtils.completedExceptionally(testException))
                        .build();

        dispatcherServiceFactory =
                createFactoryBasedOnGenericSupplier(() -> testingDispatcherService);

        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            // wait for the dispatcher process to be created
            dispatcherLeaderProcess.getDispatcherGateway().get();

            // now notify the dispatcher service
            dispatcherLeaderProcess.onRemovedExecutionPlan(JOB_GRAPH.getJobID());

            final Throwable fatalError = fatalErrorHandler.getErrorFuture().join();
            assertThat(fatalError).hasCause(testException);

            fatalErrorHandler.clearError();
        }
    }

    /** A plan added while the process is running must be submitted to the dispatcher gateway. */
    @Test
    void onAddedExecutionPlan_submitsRecoveredJob() throws Exception {
        final CompletableFuture<ExecutionPlan> submittedJobFuture = new CompletableFuture<>();
        final TestingDispatcherGateway testingDispatcherGateway =
                TestingDispatcherGateway.newBuilder()
                        .setSubmitFunction(
                                submittedJob -> {
                                    submittedJobFuture.complete(submittedJob);
                                    return CompletableFuture.completedFuture(Acknowledge.get());
                                })
                        .build();

        dispatcherServiceFactory =
                createFactoryBasedOnGenericSupplier(
                        () ->
                                TestingDispatcherGatewayService.newBuilder()
                                        .setDispatcherGateway(testingDispatcherGateway)
                                        .build());

        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            // wait first for the dispatcher service to be created
            dispatcherLeaderProcess.getDispatcherGateway().get();

            executionPlanStore.putExecutionPlan(JOB_GRAPH);
            dispatcherLeaderProcess.onAddedExecutionPlan(JOB_GRAPH.getJobID());

            final ExecutionPlan submittedExecutionPlan = submittedJobFuture.get();

            assertThat(submittedExecutionPlan.getJobID()).isEqualTo(JOB_GRAPH.getJobID());
        }
    }

    /** {@code onAddedExecutionPlan} must be a no-op once the leader process has been closed. */
    @Test
    void onAddedExecutionPlan_ifNotRunning_isBeingIgnored() throws Exception {
        final CompletableFuture<JobID> recoveredJobFuture = new CompletableFuture<>();
        executionPlanStore =
                TestingExecutionPlanStore.newBuilder()
                        .setRecoverExecutionPlanFunction(
                                (jobId, jobGraphs) -> {
                                    recoveredJobFuture.complete(jobId);
                                    return jobGraphs.get(jobId);
                                })
                        .build();

        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            // wait until the process has started the dispatcher
            dispatcherLeaderProcess.getDispatcherGateway().get();

            // now add the job graph
            executionPlanStore.putExecutionPlan(JOB_GRAPH);

            dispatcherLeaderProcess.closeAsync();

            dispatcherLeaderProcess.onAddedExecutionPlan(JOB_GRAPH.getJobID());

            // recovery must never be attempted by the closed process
            assertThatThrownBy(
                            () -> recoveredJobFuture.get(10L, TimeUnit.MILLISECONDS),
                            "onAddedExecutionPlan should be ignored if the leader process is not running.")
                    .isInstanceOf(TimeoutException.class);
        }
    }

    /**
     * A failure while recovering a newly added plan must reach the fatal error handler and leave
     * the leader process in state {@code STOPPED}.
     */
    @Test
    void onAddedExecutionPlan_failingRecovery_propagatesTheFailure() throws Exception {
        final FlinkException expectedFailure = new FlinkException("Expected failure");
        executionPlanStore =
                TestingExecutionPlanStore.newBuilder()
                        .setRecoverExecutionPlanFunction(
                                (ignoredA, ignoredB) -> {
                                    throw expectedFailure;
                                })
                        .build();

        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            // wait first for the dispatcher service to be created
            dispatcherLeaderProcess.getDispatcherGateway().get();

            executionPlanStore.putExecutionPlan(JOB_GRAPH);
            dispatcherLeaderProcess.onAddedExecutionPlan(JOB_GRAPH.getJobID());

            assertThatFuture(fatalErrorHandler.getErrorFuture())
                    .eventuallySucceeds()
                    .extracting(FlinkAssertions::chainOfCauses, STREAM_THROWABLE)
                    .contains(expectedFailure);

            assertThat(dispatcherLeaderProcess.getState())
                    .isEqualTo(SessionDispatcherLeaderProcess.State.STOPPED);

            fatalErrorHandler.clearError();
        }
    }

    /** A plan-recovery failure during startup must be escalated as a fatal error. */
    @Test
    void recoverJobs_withRecoveryFailure_failsFatally() throws Exception {
        final FlinkException testException = new FlinkException("Test exception");
        executionPlanStore =
                TestingExecutionPlanStore.newBuilder()
                        .setRecoverExecutionPlanFunction(
                                (ignoredA, ignoredB) -> {
                                    throw testException;
                                })
                        .setInitialExecutionPlans(Collections.singleton(JOB_GRAPH))
                        .build();

        runJobRecoveryFailureTest(testException);
    }

    /** A failure while listing the stored job ids during startup must be escalated as fatal. */
    @Test
    void recoverJobs_withJobIdRecoveryFailure_failsFatally() throws Exception {
        final FlinkException testException = new FlinkException("Test exception");
        executionPlanStore =
                TestingExecutionPlanStore.newBuilder()
                        .setJobIdsFunction(
                                ignored -> {
                                    throw testException;
                                })
                        .build();

        runJobRecoveryFailureTest(testException);
    }

    // Starts the process against the pre-configured (failing) store and asserts that the given
    // exception surfaces through the fatal error handler.
    private void runJobRecoveryFailureTest(FlinkException testException) throws Exception {
        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            // we expect that a fatal error occurred
            assertThatFuture(fatalErrorHandler.getErrorFuture())
                    .eventuallySucceeds()
                    .satisfies(
                            error ->
                                    assertThat(error)
                                            .satisfies(
                                                    anyCauseMatches(
                                                            testException.getClass(),
                                                            testException.getMessage())));

            fatalErrorHandler.clearError();
        }
    }

    /** A failing submission of a recovered job must be escalated as a fatal error. */
    @Test
    void onAddedExecutionPlan_failingRecoveredJobSubmission_failsFatally() throws Exception {
        final TestingDispatcherGateway dispatcherGateway =
                TestingDispatcherGateway.newBuilder()
                        .setSubmitFunction(
                                jobGraph ->
                                        FutureUtils.completedExceptionally(
                                                new JobSubmissionException(
                                                        jobGraph.getJobID(), "test exception")))
                        .build();

        runOnAddedExecutionPlanTest(
                dispatcherGateway, this::verifyOnAddedExecutionPlanResultFailsFatally);
    }

    // Asserts that the reported fatal error's cause chain contains a JobSubmissionException,
    // then resets the handler so the surrounding test can complete cleanly.
    private void verifyOnAddedExecutionPlanResultFailsFatally(
            TestingFatalErrorHandler fatalErrorHandler) {
        final Throwable actualCause = fatalErrorHandler.getErrorFuture().join();
        assertThat(actualCause)
                .extracting(FlinkAssertions::chainOfCauses, FlinkAssertions.STREAM_THROWABLE)
                .hasAtLeastOneElementOfType(JobSubmissionException.class);

        fatalErrorHandler.clearError();
    }

    /**
     * A {@code DuplicateJobSubmissionException} caused by a false-positive recovery must not be
     * treated as a fatal error.
     */
    @Test
    void onAddedExecutionPlan_duplicateJobSubmissionDueToFalsePositive_willBeIgnored()
            throws Exception {
        final TestingDispatcherGateway dispatcherGateway =
                TestingDispatcherGateway.newBuilder()
                        .setSubmitFunction(
                                jobGraph ->
                                        FutureUtils.completedExceptionally(
                                                DuplicateJobSubmissionException.of(
                                                        jobGraph.getJobID())))
                        .build();

        runOnAddedExecutionPlanTest(
                dispatcherGateway, this::verifyOnAddedExecutionPlanResultDidNotFail);
    }

    // Boots a leader process whose dispatcher service uses the given gateway, triggers
    // onAddedExecutionPlan for JOB_GRAPH, and delegates the outcome check to verificationLogic.
    private void runOnAddedExecutionPlanTest(
            TestingDispatcherGateway dispatcherGateway,
            ThrowingConsumer<TestingFatalErrorHandler, Exception> verificationLogic)
            throws Exception {
        executionPlanStore =
                TestingExecutionPlanStore.newBuilder()
                        .setInitialExecutionPlans(Collections.singleton(JOB_GRAPH))
                        .build();
        dispatcherServiceFactory =
                createFactoryBasedOnExecutionPlans(
                        jobGraphs -> {
                            assertThat(jobGraphs).containsExactlyInAnyOrder(JOB_GRAPH);
                            return TestingDispatcherGatewayService.newBuilder()
                                    .setDispatcherGateway(dispatcherGateway)
                                    .build();
                        });

        try (final SessionDispatcherLeaderProcess dispatcherLeaderProcess =
                createDispatcherLeaderProcess()) {
            dispatcherLeaderProcess.start();

            dispatcherLeaderProcess.getDispatcherGateway().get();
            dispatcherLeaderProcess.onAddedExecutionPlan(JOB_GRAPH.getJobID());

            verificationLogic.accept(fatalErrorHandler);
        }
    }

    // Adapts a function over the recovered execution plans into the multi-argument service
    // factory interface; every other factory argument is ignored.
    private AbstractDispatcherLeaderProcess.DispatcherGatewayServiceFactory
            createFactoryBasedOnExecutionPlans(
                    Function<
                                    Collection<ExecutionPlan>,
                                    AbstractDispatcherLeaderProcess.DispatcherGatewayService>
                            createFunction) {
        return (ignoredDispatcherId,
                recoveredJobs,
                ignoredRecoveredDirtyJobResults,
                ignoredExecutionPlanWriter,
                ignoredJobResultStore) -> createFunction.apply(recoveredJobs);
    }

    // Adapts a plain supplier into the service factory interface, ignoring all arguments.
    private AbstractDispatcherLeaderProcess.DispatcherGatewayServiceFactory
            createFactoryBasedOnGenericSupplier(
                    Supplier<AbstractDispatcherLeaderProcess.DispatcherGatewayService> supplier) {
        return (ignoredDispatcherId,
                ignoredRecoveredJobs,
                ignoredRecoveredDirtyJobResults,
                ignoredExecutionPlanWriter,
                ignoredJobResultStore) -> supplier.get();
    }

    // Asserts that no fatal error is reported within a short grace period.
    private void verifyOnAddedExecutionPlanResultDidNotFail(
            TestingFatalErrorHandler fatalErrorHandler) {
        assertThatThrownBy(
                        () -> fatalErrorHandler.getErrorFuture().get(10L, TimeUnit.MILLISECONDS),
                        "Expected that duplicate job submissions due to false job recoveries are ignored.")
                .isInstanceOf(TimeoutException.class);
    }

    // Creates the leader process under test from the fields configured by the individual tests.
    private SessionDispatcherLeaderProcess createDispatcherLeaderProcess() {
        return SessionDispatcherLeaderProcess.create(
                leaderSessionId,
                dispatcherServiceFactory,
                executionPlanStore,
                jobResultStore,
                ioExecutor,
                fatalErrorHandler);
    }
}
googleapis/google-cloud-java
35,423
java-billing/google-cloud-billing/src/main/java/com/google/cloud/billing/v1/stub/CloudBillingStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.billing.v1.stub; import static com.google.cloud.billing.v1.CloudBillingClient.ListBillingAccountsPagedResponse; import static com.google.cloud.billing.v1.CloudBillingClient.ListProjectBillingInfoPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import 
com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.billing.v1.BillingAccount; import com.google.cloud.billing.v1.CreateBillingAccountRequest; import com.google.cloud.billing.v1.GetBillingAccountRequest; import com.google.cloud.billing.v1.GetProjectBillingInfoRequest; import com.google.cloud.billing.v1.ListBillingAccountsRequest; import com.google.cloud.billing.v1.ListBillingAccountsResponse; import com.google.cloud.billing.v1.ListProjectBillingInfoRequest; import com.google.cloud.billing.v1.ListProjectBillingInfoResponse; import com.google.cloud.billing.v1.MoveBillingAccountRequest; import com.google.cloud.billing.v1.ProjectBillingInfo; import com.google.cloud.billing.v1.UpdateBillingAccountRequest; import com.google.cloud.billing.v1.UpdateProjectBillingInfoRequest; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link CloudBillingStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (cloudbilling.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. 
When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getBillingAccount: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * CloudBillingStubSettings.Builder cloudBillingSettingsBuilder = * CloudBillingStubSettings.newBuilder(); * cloudBillingSettingsBuilder * .getBillingAccountSettings() * .setRetrySettings( * cloudBillingSettingsBuilder * .getBillingAccountSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * CloudBillingStubSettings cloudBillingSettings = cloudBillingSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. */ @Generated("by gapic-generator-java") public class CloudBillingStubSettings extends StubSettings<CloudBillingStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/cloud-billing") .add("https://www.googleapis.com/auth/cloud-billing.readonly") .add("https://www.googleapis.com/auth/cloud-platform") .build(); private final UnaryCallSettings<GetBillingAccountRequest, BillingAccount> getBillingAccountSettings; private final PagedCallSettings< ListBillingAccountsRequest, ListBillingAccountsResponse, ListBillingAccountsPagedResponse> listBillingAccountsSettings; private final UnaryCallSettings<UpdateBillingAccountRequest, BillingAccount> updateBillingAccountSettings; private final UnaryCallSettings<CreateBillingAccountRequest, BillingAccount> createBillingAccountSettings; private final PagedCallSettings< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ListProjectBillingInfoPagedResponse> listProjectBillingInfoSettings; private final UnaryCallSettings<GetProjectBillingInfoRequest, ProjectBillingInfo> getProjectBillingInfoSettings; private final UnaryCallSettings<UpdateProjectBillingInfoRequest, ProjectBillingInfo> updateProjectBillingInfoSettings; private final UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings; private final UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings; private final UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings; private final UnaryCallSettings<MoveBillingAccountRequest, BillingAccount> moveBillingAccountSettings; private static final PagedListDescriptor< ListBillingAccountsRequest, ListBillingAccountsResponse, BillingAccount> LIST_BILLING_ACCOUNTS_PAGE_STR_DESC = new PagedListDescriptor< ListBillingAccountsRequest, ListBillingAccountsResponse, BillingAccount>() { @Override public String emptyToken() { return ""; } @Override public ListBillingAccountsRequest injectToken( ListBillingAccountsRequest payload, String token) { return 
ListBillingAccountsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListBillingAccountsRequest injectPageSize( ListBillingAccountsRequest payload, int pageSize) { return ListBillingAccountsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListBillingAccountsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListBillingAccountsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<BillingAccount> extractResources(ListBillingAccountsResponse payload) { return payload.getBillingAccountsList(); } }; private static final PagedListDescriptor< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ProjectBillingInfo> LIST_PROJECT_BILLING_INFO_PAGE_STR_DESC = new PagedListDescriptor< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ProjectBillingInfo>() { @Override public String emptyToken() { return ""; } @Override public ListProjectBillingInfoRequest injectToken( ListProjectBillingInfoRequest payload, String token) { return ListProjectBillingInfoRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListProjectBillingInfoRequest injectPageSize( ListProjectBillingInfoRequest payload, int pageSize) { return ListProjectBillingInfoRequest.newBuilder(payload) .setPageSize(pageSize) .build(); } @Override public Integer extractPageSize(ListProjectBillingInfoRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListProjectBillingInfoResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<ProjectBillingInfo> extractResources( ListProjectBillingInfoResponse payload) { return payload.getProjectBillingInfoList(); } }; private static final PagedListResponseFactory< ListBillingAccountsRequest, ListBillingAccountsResponse, ListBillingAccountsPagedResponse> LIST_BILLING_ACCOUNTS_PAGE_STR_FACT = new PagedListResponseFactory< 
ListBillingAccountsRequest, ListBillingAccountsResponse, ListBillingAccountsPagedResponse>() { @Override public ApiFuture<ListBillingAccountsPagedResponse> getFuturePagedResponse( UnaryCallable<ListBillingAccountsRequest, ListBillingAccountsResponse> callable, ListBillingAccountsRequest request, ApiCallContext context, ApiFuture<ListBillingAccountsResponse> futureResponse) { PageContext<ListBillingAccountsRequest, ListBillingAccountsResponse, BillingAccount> pageContext = PageContext.create( callable, LIST_BILLING_ACCOUNTS_PAGE_STR_DESC, request, context); return ListBillingAccountsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ListProjectBillingInfoPagedResponse> LIST_PROJECT_BILLING_INFO_PAGE_STR_FACT = new PagedListResponseFactory< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ListProjectBillingInfoPagedResponse>() { @Override public ApiFuture<ListProjectBillingInfoPagedResponse> getFuturePagedResponse( UnaryCallable<ListProjectBillingInfoRequest, ListProjectBillingInfoResponse> callable, ListProjectBillingInfoRequest request, ApiCallContext context, ApiFuture<ListProjectBillingInfoResponse> futureResponse) { PageContext< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ProjectBillingInfo> pageContext = PageContext.create( callable, LIST_PROJECT_BILLING_INFO_PAGE_STR_DESC, request, context); return ListProjectBillingInfoPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to getBillingAccount. */ public UnaryCallSettings<GetBillingAccountRequest, BillingAccount> getBillingAccountSettings() { return getBillingAccountSettings; } /** Returns the object with the settings used for calls to listBillingAccounts. 
*/ public PagedCallSettings< ListBillingAccountsRequest, ListBillingAccountsResponse, ListBillingAccountsPagedResponse> listBillingAccountsSettings() { return listBillingAccountsSettings; } /** Returns the object with the settings used for calls to updateBillingAccount. */ public UnaryCallSettings<UpdateBillingAccountRequest, BillingAccount> updateBillingAccountSettings() { return updateBillingAccountSettings; } /** Returns the object with the settings used for calls to createBillingAccount. */ public UnaryCallSettings<CreateBillingAccountRequest, BillingAccount> createBillingAccountSettings() { return createBillingAccountSettings; } /** Returns the object with the settings used for calls to listProjectBillingInfo. */ public PagedCallSettings< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ListProjectBillingInfoPagedResponse> listProjectBillingInfoSettings() { return listProjectBillingInfoSettings; } /** Returns the object with the settings used for calls to getProjectBillingInfo. */ public UnaryCallSettings<GetProjectBillingInfoRequest, ProjectBillingInfo> getProjectBillingInfoSettings() { return getProjectBillingInfoSettings; } /** Returns the object with the settings used for calls to updateProjectBillingInfo. */ public UnaryCallSettings<UpdateProjectBillingInfoRequest, ProjectBillingInfo> updateProjectBillingInfoSettings() { return updateProjectBillingInfoSettings; } /** Returns the object with the settings used for calls to getIamPolicy. */ public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() { return getIamPolicySettings; } /** Returns the object with the settings used for calls to setIamPolicy. */ public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() { return setIamPolicySettings; } /** Returns the object with the settings used for calls to testIamPermissions. 
*/ public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return testIamPermissionsSettings; } /** Returns the object with the settings used for calls to moveBillingAccount. */ public UnaryCallSettings<MoveBillingAccountRequest, BillingAccount> moveBillingAccountSettings() { return moveBillingAccountSettings; } public CloudBillingStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcCloudBillingStub.create(this); } if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonCloudBillingStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "cloudbilling"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "cloudbilling.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "cloudbilling.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. 
*/ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } /** Returns a builder for the default REST ChannelProvider for this service. */ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(CloudBillingStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(CloudBillingStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return CloudBillingStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. 
*/ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected CloudBillingStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); getBillingAccountSettings = settingsBuilder.getBillingAccountSettings().build(); listBillingAccountsSettings = settingsBuilder.listBillingAccountsSettings().build(); updateBillingAccountSettings = settingsBuilder.updateBillingAccountSettings().build(); createBillingAccountSettings = settingsBuilder.createBillingAccountSettings().build(); listProjectBillingInfoSettings = settingsBuilder.listProjectBillingInfoSettings().build(); getProjectBillingInfoSettings = settingsBuilder.getProjectBillingInfoSettings().build(); updateProjectBillingInfoSettings = settingsBuilder.updateProjectBillingInfoSettings().build(); getIamPolicySettings = settingsBuilder.getIamPolicySettings().build(); setIamPolicySettings = settingsBuilder.setIamPolicySettings().build(); testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build(); moveBillingAccountSettings = settingsBuilder.moveBillingAccountSettings().build(); } /** Builder for CloudBillingStubSettings. 
*/ public static class Builder extends StubSettings.Builder<CloudBillingStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final UnaryCallSettings.Builder<GetBillingAccountRequest, BillingAccount> getBillingAccountSettings; private final PagedCallSettings.Builder< ListBillingAccountsRequest, ListBillingAccountsResponse, ListBillingAccountsPagedResponse> listBillingAccountsSettings; private final UnaryCallSettings.Builder<UpdateBillingAccountRequest, BillingAccount> updateBillingAccountSettings; private final UnaryCallSettings.Builder<CreateBillingAccountRequest, BillingAccount> createBillingAccountSettings; private final PagedCallSettings.Builder< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ListProjectBillingInfoPagedResponse> listProjectBillingInfoSettings; private final UnaryCallSettings.Builder<GetProjectBillingInfoRequest, ProjectBillingInfo> getProjectBillingInfoSettings; private final UnaryCallSettings.Builder<UpdateProjectBillingInfoRequest, ProjectBillingInfo> updateProjectBillingInfoSettings; private final UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings; private final UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings; private final UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings; private final UnaryCallSettings.Builder<MoveBillingAccountRequest, BillingAccount> moveBillingAccountSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_1_codes", ImmutableSet.copyOf( Lists.<StatusCode.Code>newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); definitions.put( "no_retry_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); 
definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(60000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("retry_policy_1_params", settings); settings = RetrySettings.newBuilder() .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("no_retry_0_params", settings); settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build(); definitions.put("no_retry_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); getBillingAccountSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listBillingAccountsSettings = PagedCallSettings.newBuilder(LIST_BILLING_ACCOUNTS_PAGE_STR_FACT); updateBillingAccountSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createBillingAccountSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listProjectBillingInfoSettings = PagedCallSettings.newBuilder(LIST_PROJECT_BILLING_INFO_PAGE_STR_FACT); getProjectBillingInfoSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updateProjectBillingInfoSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); getIamPolicySettings = 
UnaryCallSettings.newUnaryCallSettingsBuilder(); setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); moveBillingAccountSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( getBillingAccountSettings, listBillingAccountsSettings, updateBillingAccountSettings, createBillingAccountSettings, listProjectBillingInfoSettings, getProjectBillingInfoSettings, updateProjectBillingInfoSettings, getIamPolicySettings, setIamPolicySettings, testIamPermissionsSettings, moveBillingAccountSettings); initDefaults(this); } protected Builder(CloudBillingStubSettings settings) { super(settings); getBillingAccountSettings = settings.getBillingAccountSettings.toBuilder(); listBillingAccountsSettings = settings.listBillingAccountsSettings.toBuilder(); updateBillingAccountSettings = settings.updateBillingAccountSettings.toBuilder(); createBillingAccountSettings = settings.createBillingAccountSettings.toBuilder(); listProjectBillingInfoSettings = settings.listProjectBillingInfoSettings.toBuilder(); getProjectBillingInfoSettings = settings.getProjectBillingInfoSettings.toBuilder(); updateProjectBillingInfoSettings = settings.updateProjectBillingInfoSettings.toBuilder(); getIamPolicySettings = settings.getIamPolicySettings.toBuilder(); setIamPolicySettings = settings.setIamPolicySettings.toBuilder(); testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder(); moveBillingAccountSettings = settings.moveBillingAccountSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( getBillingAccountSettings, listBillingAccountsSettings, updateBillingAccountSettings, createBillingAccountSettings, listProjectBillingInfoSettings, getProjectBillingInfoSettings, updateProjectBillingInfoSettings, getIamPolicySettings, setIamPolicySettings, testIamPermissionsSettings, 
moveBillingAccountSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder createHttpJsonDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .getBillingAccountSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .listBillingAccountsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .updateBillingAccountSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .createBillingAccountSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); builder .listProjectBillingInfoSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder 
.getProjectBillingInfoSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .updateProjectBillingInfoSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getIamPolicySettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .setIamPolicySettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .testIamPermissionsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .moveBillingAccountSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params")); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to getBillingAccount. */ public UnaryCallSettings.Builder<GetBillingAccountRequest, BillingAccount> getBillingAccountSettings() { return getBillingAccountSettings; } /** Returns the builder for the settings used for calls to listBillingAccounts. 
*/ public PagedCallSettings.Builder< ListBillingAccountsRequest, ListBillingAccountsResponse, ListBillingAccountsPagedResponse> listBillingAccountsSettings() { return listBillingAccountsSettings; } /** Returns the builder for the settings used for calls to updateBillingAccount. */ public UnaryCallSettings.Builder<UpdateBillingAccountRequest, BillingAccount> updateBillingAccountSettings() { return updateBillingAccountSettings; } /** Returns the builder for the settings used for calls to createBillingAccount. */ public UnaryCallSettings.Builder<CreateBillingAccountRequest, BillingAccount> createBillingAccountSettings() { return createBillingAccountSettings; } /** Returns the builder for the settings used for calls to listProjectBillingInfo. */ public PagedCallSettings.Builder< ListProjectBillingInfoRequest, ListProjectBillingInfoResponse, ListProjectBillingInfoPagedResponse> listProjectBillingInfoSettings() { return listProjectBillingInfoSettings; } /** Returns the builder for the settings used for calls to getProjectBillingInfo. */ public UnaryCallSettings.Builder<GetProjectBillingInfoRequest, ProjectBillingInfo> getProjectBillingInfoSettings() { return getProjectBillingInfoSettings; } /** Returns the builder for the settings used for calls to updateProjectBillingInfo. */ public UnaryCallSettings.Builder<UpdateProjectBillingInfoRequest, ProjectBillingInfo> updateProjectBillingInfoSettings() { return updateProjectBillingInfoSettings; } /** Returns the builder for the settings used for calls to getIamPolicy. */ public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() { return getIamPolicySettings; } /** Returns the builder for the settings used for calls to setIamPolicy. */ public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() { return setIamPolicySettings; } /** Returns the builder for the settings used for calls to testIamPermissions. 
*/ public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return testIamPermissionsSettings; } /** Returns the builder for the settings used for calls to moveBillingAccount. */ public UnaryCallSettings.Builder<MoveBillingAccountRequest, BillingAccount> moveBillingAccountSettings() { return moveBillingAccountSettings; } @Override public CloudBillingStubSettings build() throws IOException { return new CloudBillingStubSettings(this); } } }
googleapis/google-cloud-java
35,231
java-financialservices/proto-google-cloud-financialservices-v1/src/main/java/com/google/cloud/financialservices/v1/ExportPredictionResultMetadataRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/financialservices/v1/prediction_result.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.financialservices.v1; /** * * * <pre> * Request for exporting PredictionResult metadata. * </pre> * * Protobuf type {@code google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest} */ public final class ExportPredictionResultMetadataRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest) ExportPredictionResultMetadataRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ExportPredictionResultMetadataRequest.newBuilder() to construct. 
private ExportPredictionResultMetadataRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExportPredictionResultMetadataRequest() { predictionResult_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ExportPredictionResultMetadataRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.financialservices.v1.PredictionResultProto .internal_static_google_cloud_financialservices_v1_ExportPredictionResultMetadataRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.financialservices.v1.PredictionResultProto .internal_static_google_cloud_financialservices_v1_ExportPredictionResultMetadataRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest.class, com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest.Builder .class); } private int bitField0_; public static final int PREDICTION_RESULT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object predictionResult_ = ""; /** * * * <pre> * Required. The resource name of the PredictionResult. * </pre> * * <code> * string prediction_result = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The predictionResult. */ @java.lang.Override public java.lang.String getPredictionResult() { java.lang.Object ref = predictionResult_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); predictionResult_ = s; return s; } } /** * * * <pre> * Required. The resource name of the PredictionResult. 
* </pre> * * <code> * string prediction_result = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for predictionResult. */ @java.lang.Override public com.google.protobuf.ByteString getPredictionResultBytes() { java.lang.Object ref = predictionResult_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); predictionResult_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STRUCTURED_METADATA_DESTINATION_FIELD_NUMBER = 2; private com.google.cloud.financialservices.v1.BigQueryDestination structuredMetadataDestination_; /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the structuredMetadataDestination field is set. */ @java.lang.Override public boolean hasStructuredMetadataDestination() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The structuredMetadataDestination. */ @java.lang.Override public com.google.cloud.financialservices.v1.BigQueryDestination getStructuredMetadataDestination() { return structuredMetadataDestination_ == null ? com.google.cloud.financialservices.v1.BigQueryDestination.getDefaultInstance() : structuredMetadataDestination_; } /** * * * <pre> * Required. BigQuery output where the metadata will be written. 
* </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.financialservices.v1.BigQueryDestinationOrBuilder getStructuredMetadataDestinationOrBuilder() { return structuredMetadataDestination_ == null ? com.google.cloud.financialservices.v1.BigQueryDestination.getDefaultInstance() : structuredMetadataDestination_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(predictionResult_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, predictionResult_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getStructuredMetadataDestination()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(predictionResult_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, predictionResult_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, getStructuredMetadataDestination()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest)) { return super.equals(obj); } com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest other = 
(com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest) obj; if (!getPredictionResult().equals(other.getPredictionResult())) return false; if (hasStructuredMetadataDestination() != other.hasStructuredMetadataDestination()) return false; if (hasStructuredMetadataDestination()) { if (!getStructuredMetadataDestination().equals(other.getStructuredMetadataDestination())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PREDICTION_RESULT_FIELD_NUMBER; hash = (53 * hash) + getPredictionResult().hashCode(); if (hasStructuredMetadataDestination()) { hash = (37 * hash) + STRUCTURED_METADATA_DESTINATION_FIELD_NUMBER; hash = (53 * hash) + getStructuredMetadataDestination().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for exporting PredictionResult metadata. 
* </pre> * * Protobuf type {@code google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest) com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.financialservices.v1.PredictionResultProto .internal_static_google_cloud_financialservices_v1_ExportPredictionResultMetadataRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.financialservices.v1.PredictionResultProto .internal_static_google_cloud_financialservices_v1_ExportPredictionResultMetadataRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest.class, com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest.Builder .class); } // Construct using // com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStructuredMetadataDestinationFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; predictionResult_ = ""; structuredMetadataDestination_ = null; if (structuredMetadataDestinationBuilder_ != null) { structuredMetadataDestinationBuilder_.dispose(); structuredMetadataDestinationBuilder_ = null; } return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.financialservices.v1.PredictionResultProto .internal_static_google_cloud_financialservices_v1_ExportPredictionResultMetadataRequest_descriptor; } @java.lang.Override public com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest getDefaultInstanceForType() { return com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest build() { com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest buildPartial() { com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest result = new com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.predictionResult_ = predictionResult_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.structuredMetadataDestination_ = structuredMetadataDestinationBuilder_ == null ? 
structuredMetadataDestination_ : structuredMetadataDestinationBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest) { return mergeFrom( (com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest other) { if (other == com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest .getDefaultInstance()) return this; if (!other.getPredictionResult().isEmpty()) { predictionResult_ = other.predictionResult_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasStructuredMetadataDestination()) { mergeStructuredMetadataDestination(other.getStructuredMetadataDestination()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { predictionResult_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getStructuredMetadataDestinationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object predictionResult_ = ""; /** * * * <pre> * Required. The resource name of the PredictionResult. * </pre> * * <code> * string prediction_result = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The predictionResult. */ public java.lang.String getPredictionResult() { java.lang.Object ref = predictionResult_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); predictionResult_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the PredictionResult. * </pre> * * <code> * string prediction_result = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for predictionResult. 
*/ public com.google.protobuf.ByteString getPredictionResultBytes() { java.lang.Object ref = predictionResult_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); predictionResult_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the PredictionResult. * </pre> * * <code> * string prediction_result = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The predictionResult to set. * @return This builder for chaining. */ public Builder setPredictionResult(java.lang.String value) { if (value == null) { throw new NullPointerException(); } predictionResult_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the PredictionResult. * </pre> * * <code> * string prediction_result = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearPredictionResult() { predictionResult_ = getDefaultInstance().getPredictionResult(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the PredictionResult. * </pre> * * <code> * string prediction_result = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for predictionResult to set. * @return This builder for chaining. 
*/ public Builder setPredictionResultBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); predictionResult_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.financialservices.v1.BigQueryDestination structuredMetadataDestination_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.financialservices.v1.BigQueryDestination, com.google.cloud.financialservices.v1.BigQueryDestination.Builder, com.google.cloud.financialservices.v1.BigQueryDestinationOrBuilder> structuredMetadataDestinationBuilder_; /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the structuredMetadataDestination field is set. */ public boolean hasStructuredMetadataDestination() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The structuredMetadataDestination. */ public com.google.cloud.financialservices.v1.BigQueryDestination getStructuredMetadataDestination() { if (structuredMetadataDestinationBuilder_ == null) { return structuredMetadataDestination_ == null ? com.google.cloud.financialservices.v1.BigQueryDestination.getDefaultInstance() : structuredMetadataDestination_; } else { return structuredMetadataDestinationBuilder_.getMessage(); } } /** * * * <pre> * Required. BigQuery output where the metadata will be written. 
* </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setStructuredMetadataDestination( com.google.cloud.financialservices.v1.BigQueryDestination value) { if (structuredMetadataDestinationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } structuredMetadataDestination_ = value; } else { structuredMetadataDestinationBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setStructuredMetadataDestination( com.google.cloud.financialservices.v1.BigQueryDestination.Builder builderForValue) { if (structuredMetadataDestinationBuilder_ == null) { structuredMetadataDestination_ = builderForValue.build(); } else { structuredMetadataDestinationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. BigQuery output where the metadata will be written. 
* </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeStructuredMetadataDestination( com.google.cloud.financialservices.v1.BigQueryDestination value) { if (structuredMetadataDestinationBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && structuredMetadataDestination_ != null && structuredMetadataDestination_ != com.google.cloud.financialservices.v1.BigQueryDestination.getDefaultInstance()) { getStructuredMetadataDestinationBuilder().mergeFrom(value); } else { structuredMetadataDestination_ = value; } } else { structuredMetadataDestinationBuilder_.mergeFrom(value); } if (structuredMetadataDestination_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearStructuredMetadataDestination() { bitField0_ = (bitField0_ & ~0x00000002); structuredMetadataDestination_ = null; if (structuredMetadataDestinationBuilder_ != null) { structuredMetadataDestinationBuilder_.dispose(); structuredMetadataDestinationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.financialservices.v1.BigQueryDestination.Builder getStructuredMetadataDestinationBuilder() { bitField0_ |= 0x00000002; onChanged(); return getStructuredMetadataDestinationFieldBuilder().getBuilder(); } /** * * * <pre> * Required. BigQuery output where the metadata will be written. 
* </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.financialservices.v1.BigQueryDestinationOrBuilder getStructuredMetadataDestinationOrBuilder() { if (structuredMetadataDestinationBuilder_ != null) { return structuredMetadataDestinationBuilder_.getMessageOrBuilder(); } else { return structuredMetadataDestination_ == null ? com.google.cloud.financialservices.v1.BigQueryDestination.getDefaultInstance() : structuredMetadataDestination_; } } /** * * * <pre> * Required. BigQuery output where the metadata will be written. * </pre> * * <code> * .google.cloud.financialservices.v1.BigQueryDestination structured_metadata_destination = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.financialservices.v1.BigQueryDestination, com.google.cloud.financialservices.v1.BigQueryDestination.Builder, com.google.cloud.financialservices.v1.BigQueryDestinationOrBuilder> getStructuredMetadataDestinationFieldBuilder() { if (structuredMetadataDestinationBuilder_ == null) { structuredMetadataDestinationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.financialservices.v1.BigQueryDestination, com.google.cloud.financialservices.v1.BigQueryDestination.Builder, com.google.cloud.financialservices.v1.BigQueryDestinationOrBuilder>( getStructuredMetadataDestination(), getParentForChildren(), isClean()); structuredMetadataDestination_ = null; } return structuredMetadataDestinationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest) } // @@protoc_insertion_point(class_scope:google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest) private static final com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest(); } public static com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ExportPredictionResultMetadataRequest> PARSER = new com.google.protobuf.AbstractParser<ExportPredictionResultMetadataRequest>() { @java.lang.Override public ExportPredictionResultMetadataRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ExportPredictionResultMetadataRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ExportPredictionResultMetadataRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.financialservices.v1.ExportPredictionResultMetadataRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/helix
35,176
helix-core/src/main/java/org/apache/helix/ConfigAccessor.java
package org.apache.helix; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.apache.helix.manager.zk.GenericZkHelixApiBuilder; import org.apache.helix.manager.zk.ZKUtil; import org.apache.helix.model.CloudConfig; import org.apache.helix.model.ClusterConfig; import org.apache.helix.model.ConfigScope; import org.apache.helix.model.CustomizedStateConfig; import org.apache.helix.model.HelixConfigScope; import org.apache.helix.model.HelixConfigScope.ConfigScopeProperty; import org.apache.helix.model.InstanceConfig; import org.apache.helix.model.RESTConfig; import org.apache.helix.model.ResourceConfig; import org.apache.helix.model.builder.HelixConfigScopeBuilder; import org.apache.helix.msdcommon.exception.InvalidRoutingDataException; import org.apache.helix.util.HelixUtil; import org.apache.helix.util.StringTemplate; import org.apache.helix.zookeeper.api.client.HelixZkClient; import org.apache.helix.zookeeper.api.client.RealmAwareZkClient; import org.apache.helix.zookeeper.datamodel.ZNRecord; import 
org.apache.helix.zookeeper.datamodel.serializer.ZNRecordSerializer; import org.apache.helix.zookeeper.impl.client.FederatedZkClient; import org.apache.helix.zookeeper.impl.factory.SharedZkClientFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Provides access to the persistent configuration of the cluster, the instances that live on it, * and the logical resources assigned to it. */ public class ConfigAccessor { private static Logger LOG = LoggerFactory.getLogger(ConfigAccessor.class); private static final StringTemplate template = new StringTemplate(); static { // @formatter:off template.addEntry(ConfigScopeProperty.CLUSTER, 1, "/{clusterName}/CONFIGS/CLUSTER"); template.addEntry(ConfigScopeProperty.CLUSTER, 2, "/{clusterName}/CONFIGS/CLUSTER/{clusterName}|SIMPLEKEYS"); template.addEntry(ConfigScopeProperty.PARTICIPANT, 1, "/{clusterName}/CONFIGS/PARTICIPANT"); template.addEntry(ConfigScopeProperty.PARTICIPANT, 2, "/{clusterName}/CONFIGS/PARTICIPANT/{participantName}|SIMPLEKEYS"); template.addEntry(ConfigScopeProperty.RESOURCE, 1, "/{clusterName}/CONFIGS/RESOURCE"); template.addEntry(ConfigScopeProperty.RESOURCE, 2, "/{clusterName}/CONFIGS/RESOURCE/{resourceName}|SIMPLEKEYS"); template.addEntry(ConfigScopeProperty.PARTITION, 2, "/{clusterName}/CONFIGS/RESOURCE/{resourceName}|MAPKEYS"); template.addEntry(ConfigScopeProperty.PARTITION, 3, "/{clusterName}/CONFIGS/RESOURCE/{resourceName}|MAPMAPKEYS|{partitionName}"); // @formatter:on } private final RealmAwareZkClient _zkClient; // true if ConfigAccessor was instantiated with a HelixZkClient, false otherwise // This is used for close() to determine how ConfigAccessor should close the underlying ZkClient private final boolean _usesExternalZkClient; private ConfigAccessor(RealmAwareZkClient zkClient, boolean usesExternalZkClient) { _zkClient = zkClient; _usesExternalZkClient = usesExternalZkClient; } /** * Initialize an accessor with a Zookeeper client * Note: it is recommended to use the other 
constructor instead to avoid having to create a * RealmAwareZkClient. * @param zkClient */ @Deprecated public ConfigAccessor(RealmAwareZkClient zkClient) { _zkClient = zkClient; _usesExternalZkClient = true; } /** * Initialize a ConfigAccessor with a ZooKeeper connect string. It will use a SharedZkClient with * default settings. Note that ZNRecordSerializer will be used for the internal ZkClient since * ConfigAccessor only deals with Helix's data models like ResourceConfig. * @param zkAddress */ @Deprecated public ConfigAccessor(String zkAddress) { _usesExternalZkClient = false; // If the multi ZK config is enabled, use FederatedZkClient on multi-realm mode if (Boolean.getBoolean(SystemPropertyKeys.MULTI_ZK_ENABLED) || zkAddress == null) { try { _zkClient = new FederatedZkClient( new RealmAwareZkClient.RealmAwareZkConnectionConfig.Builder().build(), new RealmAwareZkClient.RealmAwareZkClientConfig() .setZkSerializer(new ZNRecordSerializer())); return; } catch (InvalidRoutingDataException | IllegalStateException e) { throw new HelixException("Failed to create ConfigAccessor!", e); } } _zkClient = SharedZkClientFactory.getInstance() .buildZkClient(new HelixZkClient.ZkConnectionConfig(zkAddress), new HelixZkClient.ZkClientConfig().setZkSerializer(new ZNRecordSerializer())); } /** * get config * @deprecated replaced by {@link #get(HelixConfigScope, String)} * @param scope * @param key * @return value or null if doesn't exist */ @Deprecated public String get(ConfigScope scope, String key) { Map<String, String> map = get(scope, Arrays.asList(key)); return map.get(key); } /** * get configs * @deprecated replaced by {@link #get(HelixConfigScope, List<String>)} * @param scope * @param keys * @return */ @Deprecated public Map<String, String> get(ConfigScope scope, List<String> keys) { if (scope == null || scope.getScope() == null) { LOG.error("Scope can't be null"); return null; } // String value = null; Map<String, String> map = new HashMap<String, String>(); String 
clusterName = scope.getClusterName(); if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("cluster " + clusterName + " is not setup yet"); } String scopeStr = scope.getScopeStr(); String[] splits = scopeStr.split("\\|"); ZNRecord record = _zkClient.readData(splits[0], true); if (record != null) { if (splits.length == 1) { for (String key : keys) { if (record.getSimpleFields().containsKey(key)) { map.put(key, record.getSimpleField(key)); } } } else if (splits.length == 2) { if (record.getMapField(splits[1]) != null) { for (String key : keys) { if (record.getMapField(splits[1]).containsKey(key)) { map.put(key, record.getMapField(splits[1]).get(key)); } } } } } return map; } /** * get a single config entry * @param scope specification of the entity set to query * (e.g. cluster, resource, participant, etc.) * @param key the identifier of the configuration entry * @return the configuration entry */ public String get(HelixConfigScope scope, String key) { Map<String, String> map = get(scope, Arrays.asList(key)); if (map != null) { return map.get(key); } return null; } /** * get many config entries * @param scope scope specification of the entity set to query * (e.g. cluster, resource, participant, etc.) * @param keys the identifiers of the configuration entries * @return the configuration entries, organized by key */ public Map<String, String> get(HelixConfigScope scope, List<String> keys) { if (scope == null || scope.getType() == null || !scope.isFullKey()) { LOG.error("fail to get configs. invalid config scope. 
scope: {}, keys: {}.", scope, keys); return null; } ZNRecord record = getConfigZnRecord(scope); if (record == null) { LOG.warn("No config found at {}.", scope.getZkPath()); return null; } Map<String, String> map = new HashMap<String, String>(); String mapKey = scope.getMapKey(); if (mapKey == null) { for (String key : keys) { if (record.getSimpleFields().containsKey(key)) { map.put(key, record.getSimpleField(key)); } } } else { Map<String, String> configMap = record.getMapField(mapKey); if (configMap == null) { LOG.warn("No map-field found in {} using mapKey: {}.", record, mapKey); return null; } for (String key : keys) { if (record.getMapField(mapKey).containsKey(key)) { map.put(key, record.getMapField(mapKey).get(key)); } } } return map; } private ZNRecord getConfigZnRecord(HelixConfigScope scope) { String clusterName = scope.getClusterName(); if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to get configs. cluster " + clusterName + " is not setup yet"); } return _zkClient.readData(scope.getZkPath(), true); } /** * Set config, create if not exist * @deprecated replaced by {@link #set(HelixConfigScope, String, String)} * @param scope * @param key * @param value */ @Deprecated public void set(ConfigScope scope, String key, String value) { Map<String, String> map = new HashMap<String, String>(); map.put(key, value); set(scope, map); } /** * Set configs, create if not exist * @deprecated replaced by {@link #set(HelixConfigScope, Map<String, String>)} * @param scope * @param keyValueMap */ @Deprecated public void set(ConfigScope scope, Map<String, String> keyValueMap) { if (scope == null || scope.getScope() == null) { LOG.error("Scope can't be null."); return; } String clusterName = scope.getClusterName(); if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("cluster: " + clusterName + " is NOT setup."); } if (scope.getScope() == ConfigScopeProperty.PARTICIPANT) { String scopeStr = scope.getScopeStr(); 
String instanceName = scopeStr.substring(scopeStr.lastIndexOf('/') + 1); if (!ZKUtil.isInstanceSetup(_zkClient, scope.getClusterName(), instanceName, InstanceType.PARTICIPANT)) { throw new HelixException( "instance: " + instanceName + " is NOT setup in cluster: " + clusterName); } } // use "|" to delimit resource and partition. e.g. /MyCluster/CONFIGS/PARTICIPANT/MyDB|MyDB_0 String scopeStr = scope.getScopeStr(); String[] splits = scopeStr.split("\\|"); String id = splits[0].substring(splits[0].lastIndexOf('/') + 1); ZNRecord update = new ZNRecord(id); if (splits.length == 1) { for (String key : keyValueMap.keySet()) { String value = keyValueMap.get(key); update.setSimpleField(key, value); } } else if (splits.length == 2) { if (update.getMapField(splits[1]) == null) { update.setMapField(splits[1], new TreeMap<String, String>()); } for (String key : keyValueMap.keySet()) { String value = keyValueMap.get(key); update.getMapField(splits[1]).put(key, value); } } ZKUtil.createOrMerge(_zkClient, splits[0], update, true, true); } /** * Set config, creating it if it doesn't exist * @param scope scope specification of the entity set to query * (e.g. cluster, resource, participant, etc.) * @param key the identifier of the configuration entry * @param value the configuration */ public void set(HelixConfigScope scope, String key, String value) { Map<String, String> map = new TreeMap<String, String>(); map.put(key, value); set(scope, map); } /** * Set multiple configs, creating them if they don't exist * @param scope scope specification of the entity set to query * (e.g. cluster, resource, participant, etc.) * @param keyValueMap configurations organized by their identifiers */ public void set(HelixConfigScope scope, Map<String, String> keyValueMap) { if (scope == null || scope.getType() == null || !scope.isFullKey()) { LOG.error("fail to set config. invalid config scope. 
Scope: {}.", scope); return; } String clusterName = scope.getClusterName(); if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to set config. cluster: " + clusterName + " is NOT setup."); } if (scope.getType() == ConfigScopeProperty.PARTICIPANT) { if (!ZKUtil.isInstanceSetup(_zkClient, scope.getClusterName(), scope.getParticipantName(), InstanceType.PARTICIPANT)) { throw new HelixException("fail to set config. instance: " + scope.getParticipantName() + " is NOT setup in cluster: " + clusterName); } } String mapKey = scope.getMapKey(); String zkPath = scope.getZkPath(); String id = zkPath.substring(zkPath.lastIndexOf('/') + 1); ZNRecord update = new ZNRecord(id); if (mapKey == null) { update.getSimpleFields().putAll(keyValueMap); } else { update.setMapField(mapKey, keyValueMap); } ZKUtil.createOrMerge(_zkClient, zkPath, update, true, true); } /** * Remove config * @deprecated replaced by {@link #remove(HelixConfigScope, String)} * @param scope * @param key */ @Deprecated public void remove(ConfigScope scope, String key) { remove(scope, Arrays.asList(key)); } /** * remove configs * @deprecated replaced by {@link #remove(HelixConfigScope, List<String>)} * @param scope * @param keys */ @Deprecated public void remove(ConfigScope scope, List<String> keys) { if (scope == null || scope.getScope() == null) { LOG.error("Scope can't be null."); return; } String clusterName = scope.getClusterName(); if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("cluster " + clusterName + " is not setup yet"); } String scopeStr = scope.getScopeStr(); String[] splits = scopeStr.split("\\|"); String id = splits[0].substring(splits[0].lastIndexOf('/') + 1); ZNRecord update = new ZNRecord(id); if (splits.length == 1) { // subtract doesn't care about value, use empty string for (String key : keys) { update.setSimpleField(key, ""); } } else if (splits.length == 2) { if (update.getMapField(splits[1]) == null) { 
update.setMapField(splits[1], new TreeMap<String, String>()); } // subtract doesn't care about value, use empty string for (String key : keys) { update.getMapField(splits[1]).put(key, ""); } } ZKUtil.subtract(_zkClient, splits[0], update); } /** * Remove a single config * @param scope scope specification of the entity set to query * (e.g. cluster, resource, participant, etc.) * @param key the identifier of the configuration entry */ public void remove(HelixConfigScope scope, String key) { remove(scope, Arrays.asList(key)); } /** * Remove multiple configs * @param scope scope specification of the entity set to query * (e.g. cluster, resource, participant, etc.) * @param keys the identifiers of the configuration entries */ public void remove(HelixConfigScope scope, List<String> keys) { if (scope == null || scope.getType() == null || !scope.isFullKey()) { LOG.error("fail to remove. invalid scope: {}, keys: {}", scope, keys); return; } String clusterName = scope.getClusterName(); if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to remove. cluster " + clusterName + " is not setup yet"); } String zkPath = scope.getZkPath(); String mapKey = scope.getMapKey(); String id = zkPath.substring(zkPath.lastIndexOf('/') + 1); ZNRecord update = new ZNRecord(id); if (mapKey == null) { // subtract doesn't care about value, use empty string for (String key : keys) { update.setSimpleField(key, ""); } } else { update.setMapField(mapKey, new TreeMap<String, String>()); // subtract doesn't care about value, use empty string for (String key : keys) { update.getMapField(mapKey).put(key, ""); } } ZKUtil.subtract(_zkClient, zkPath, update); } /** * Remove multiple configs * * @param scope scope specification of the entity set to query (e.g. cluster, resource, * participant, etc.) 
* @param recordToRemove the ZNRecord that holds the entries that needs to be removed */ public void remove(HelixConfigScope scope, ZNRecord recordToRemove) { if (scope == null || scope.getType() == null || !scope.isFullKey()) { LOG.error("fail to remove. invalid scope: {}.", scope); return; } String clusterName = scope.getClusterName(); if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to remove. cluster " + clusterName + " is not setup yet"); } String zkPath = scope.getZkPath(); ZKUtil.subtract(_zkClient, zkPath, recordToRemove); } /** * get config keys * @deprecated replaced by {@link #getKeys(HelixConfigScope)} * @param type * @param clusterName * @param keys * @return */ @Deprecated public List<String> getKeys(ConfigScopeProperty type, String clusterName, String... keys) { if (type == null || clusterName == null) { LOG.error("ClusterName|scope can't be null."); return Collections.emptyList(); } try { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { LOG.error("cluster {} is not setup yet.", clusterName); return Collections.emptyList(); } String[] args = new String[1 + keys.length]; args[0] = clusterName; System.arraycopy(keys, 0, args, 1, keys.length); String scopeStr = template.instantiate(type, args); String[] splits = scopeStr.split("\\|"); List<String> retKeys = null; if (splits.length == 1) { retKeys = _zkClient.getChildren(splits[0]); } else { ZNRecord record = _zkClient.readData(splits[0]); if (splits[1].startsWith("SIMPLEKEYS")) { retKeys = new ArrayList<String>(record.getSimpleFields().keySet()); } else if (splits[1].startsWith("MAPKEYS")) { retKeys = new ArrayList<String>(record.getMapFields().keySet()); } else if (splits[1].startsWith("MAPMAPKEYS")) { retKeys = new ArrayList<String>(record.getMapField(splits[2]).keySet()); } } if (retKeys == null) { LOG.error("Invalid scope: {} or keys: {}.", type, Arrays.toString(args)); return Collections.emptyList(); } Collections.sort(retKeys); return retKeys; } catch 
(Exception e) { return Collections.emptyList(); } } /** * Get list of config keys for a scope * @param scope * @return a list of configuration keys */ public List<String> getKeys(HelixConfigScope scope) { if (scope == null || scope.getType() == null) { LOG.error("Fail to getKeys. Invalid config scope: {}.", scope); return null; } if (!ZKUtil.isClusterSetup(scope.getClusterName(), _zkClient)) { LOG.error("Fail to getKeys. Cluster {} is not setup yet.", scope.getClusterName()); return Collections.emptyList(); } String zkPath = scope.getZkPath(); String mapKey = scope.getMapKey(); List<String> retKeys = null; if (scope.isFullKey()) { ZNRecord record = _zkClient.readData(zkPath); if (mapKey == null) { retKeys = new ArrayList<String>(record.getSimpleFields().keySet()); } else { retKeys = new ArrayList<String>(record.getMapField(mapKey).keySet()); } } else { if (scope.getType() == ConfigScopeProperty.PARTITION) { ZNRecord record = _zkClient.readData(zkPath); retKeys = new ArrayList<String>(record.getMapFields().keySet()); } else { retKeys = _zkClient.getChildren(zkPath); } } if (retKeys != null) { Collections.sort(retKeys); } return retKeys; } /** * Get CustomizedStateConfig of the given cluster. * @param clusterName * @return The instance of {@link CustomizedStateConfig} */ public CustomizedStateConfig getCustomizedStateConfig(String clusterName) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException(String.format("Failed to get config. cluster: %s is not setup.", clusterName)); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.CUSTOMIZED_STATE).forCluster(clusterName).build(); ZNRecord record = getConfigZnRecord(scope); if (record == null) { LOG.warn("No customized state aggregation config found at {}.", scope.getZkPath()); return null; } return new CustomizedStateConfig.Builder(record).build(); } /** * Get ClusterConfig of the given cluster. 
* * @param clusterName * * @return */ public ClusterConfig getClusterConfig(String clusterName) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to get config. cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.CLUSTER).forCluster(clusterName).build(); ZNRecord record = getConfigZnRecord(scope); if (record == null) { LOG.warn("No config found at {}.", scope.getZkPath()); return null; } return new ClusterConfig(record); } /** * Get CloudConfig of the given cluster. * @param clusterName * @return The instance of {@link CloudConfig} */ public CloudConfig getCloudConfig(String clusterName) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException( String.format("Failed to get config. cluster: %s is not setup.", clusterName)); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.CLOUD).forCluster(clusterName).build(); ZNRecord record = getConfigZnRecord(scope); if (record == null) { LOG.warn("No cloud config found at {}.", scope.getZkPath()); return null; } return new CloudConfig(record); } /** * Delete cloud config fields (not the whole config) * @param clusterName * @param cloudConfig */ public void deleteCloudConfigFields(String clusterName, CloudConfig cloudConfig) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to delete cloud config. 
cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.CLOUD).forCluster(clusterName).build(); remove(scope, cloudConfig.getRecord()); } /** * Update cloud config * @param clusterName * @param cloudConfig */ public void updateCloudConfig(String clusterName, CloudConfig cloudConfig) { updateCloudConfig(clusterName, cloudConfig, false); } private void updateCloudConfig(String clusterName, CloudConfig cloudConfig, boolean overwrite) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("Fail to update cloud config. cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.CLOUD).forCluster(clusterName).build(); String zkPath = scope.getZkPath(); if (overwrite) { ZKUtil.createOrReplace(_zkClient, zkPath, cloudConfig.getRecord(), true); } else { ZKUtil.createOrUpdate(_zkClient, zkPath, cloudConfig.getRecord(), true, true); } } /** * Get RestConfig of the given cluster. 
* @param clusterName The cluster * @return The instance of {@link RESTConfig} */ public RESTConfig getRESTConfig(String clusterName) { HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.REST).forCluster(clusterName).build(); ZNRecord record = getConfigZnRecord(scope); if (record == null) { LOG.warn("No rest config found at {}.", scope.getZkPath()); return null; } return new RESTConfig(record); } /** * Set RestConfig of a given cluster * @param clusterName the cluster id * @param restConfig the RestConfig to be set to the cluster */ public void setRESTConfig(String clusterName, RESTConfig restConfig) { updateRESTConfig(clusterName, restConfig, true); } /** * Update RestConfig of a given cluster * @param clusterName the cluster id * @param restConfig the new RestConfig to be set to the cluster */ public void updateRESTConfig(String clusterName, RESTConfig restConfig) { updateRESTConfig(clusterName, restConfig, false); } private void updateRESTConfig(String clusterName, RESTConfig restConfig, boolean overwrite) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("Fail to update REST config. cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.REST).forCluster(clusterName).build(); String zkPath = scope.getZkPath(); // Create "/{clusterId}/CONFIGS/REST" if it does not exist String parentPath = HelixUtil.getZkParentPath(zkPath); if (!_zkClient.exists(parentPath)) { ZKUtil.createOrMerge(_zkClient, parentPath, new ZNRecord(parentPath), true, true); } if (overwrite) { ZKUtil.createOrReplace(_zkClient, zkPath, restConfig.getRecord(), true); } else { ZKUtil.createOrUpdate(_zkClient, zkPath, restConfig.getRecord(), true, true); } } public void deleteRESTConfig(String clusterName) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("Fail to delete REST config. 
cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.REST).forCluster(clusterName).build(); String zkPath = scope.getZkPath(); // Check if "/{clusterId}/CONFIGS/REST" exists String parentPath = HelixUtil.getZkParentPath(zkPath); if (!_zkClient.exists(parentPath)) { throw new HelixException("Fail to delete REST config. cluster: " + clusterName + " does not have a rest config."); } ZKUtil.dropChildren(_zkClient, parentPath, new ZNRecord(clusterName)); } /** * Set ClusterConfig of the given cluster. * The current Cluster config will be replaced with the given clusterConfig. * WARNING: This is not thread-safe or concurrent updates safe. * * @param clusterName * @param clusterConfig * * @return */ public void setClusterConfig(String clusterName, ClusterConfig clusterConfig) { updateClusterConfig(clusterName, clusterConfig, true); } /** * Update ClusterConfig of the given cluster. * The value of field in current config will be replaced with the value of the same field in given config if it * presents. If there is new field in given config but not in current config, the field will be added into * the current config.. * The list fields and map fields will be replaced as a single entry. * * The current Cluster config will be replaced with the given clusterConfig. * WARNING: This is not thread-safe or concurrent updates safe. * * @param clusterName * @param clusterConfig * * @return */ public void updateClusterConfig(String clusterName, ClusterConfig clusterConfig) { updateClusterConfig(clusterName, clusterConfig, false); } private void updateClusterConfig(String clusterName, ClusterConfig clusterConfig, boolean overwrite) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to update config. 
cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.CLUSTER).forCluster(clusterName).build(); String zkPath = scope.getZkPath(); if (overwrite) { ZKUtil.createOrReplace(_zkClient, zkPath, clusterConfig.getRecord(), true); } else { ZKUtil.createOrUpdate(_zkClient, zkPath, clusterConfig.getRecord(), true, true); } } /** * Get resource config for given resource in given cluster. * * @param clusterName * @param resourceName * * @return */ public ResourceConfig getResourceConfig(String clusterName, String resourceName) { HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.RESOURCE).forCluster(clusterName) .forResource(resourceName).build(); ZNRecord record = getConfigZnRecord(scope); if (record == null) { LOG.warn("No config found at {}.", scope.getZkPath()); return null; } return new ResourceConfig(record); } /** * Set config of the given resource. * The current Resource config will be replaced with the given clusterConfig. * * WARNING: This is not thread-safe or concurrent updates safe. * * @param clusterName * @param resourceName * @param resourceConfig * * @return */ public void setResourceConfig(String clusterName, String resourceName, ResourceConfig resourceConfig) { updateResourceConfig(clusterName, resourceName, resourceConfig, true); } /** * Update ResourceConfig of the given resource. * The value of field in current config will be replaced with the value of the same field in given config if it * presents. If there is new field in given config but not in current config, the field will be added into * the current config.. * The list fields and map fields will be replaced as a single entry. * * The current Cluster config will be replaced with the given clusterConfig. * WARNING: This is not thread-safe or concurrent updates safe. 
* * @param clusterName * @param resourceName * @param resourceConfig * * @return */ public void updateResourceConfig(String clusterName, String resourceName, ResourceConfig resourceConfig) { updateResourceConfig(clusterName, resourceName, resourceConfig, false); } private void updateResourceConfig(String clusterName, String resourceName, ResourceConfig resourceConfig, boolean overwrite) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to setup config. cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.RESOURCE).forCluster(clusterName) .forResource(resourceName).build(); String zkPath = scope.getZkPath(); if (overwrite) { ZKUtil.createOrReplace(_zkClient, zkPath, resourceConfig.getRecord(), true); } else { ZKUtil.createOrUpdate(_zkClient, zkPath, resourceConfig.getRecord(), true, true); } } /** * Get instance config for given resource in given cluster. * * @param clusterName * @param instanceName * * @return */ public InstanceConfig getInstanceConfig(String clusterName, String instanceName) { if (!ZKUtil.isInstanceSetup(_zkClient, clusterName, instanceName, InstanceType.PARTICIPANT)) { throw new HelixException( "fail to get config. instance: " + instanceName + " is NOT setup in cluster: " + clusterName); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.PARTICIPANT).forCluster(clusterName) .forParticipant(instanceName).build(); ZNRecord record = getConfigZnRecord(scope); if (record == null) { LOG.warn("No config found at {}.", scope.getZkPath()); return null; } return new InstanceConfig(record); } /** * Set config of the given instance config. * The current instance config will be replaced with the given instanceConfig. * WARNING: This is not thread-safe or concurrent updates safe. 
* * @param clusterName * @param instanceName * @param instanceConfig * * @return */ public void setInstanceConfig(String clusterName, String instanceName, InstanceConfig instanceConfig) { updateInstanceConfig(clusterName, instanceName, instanceConfig, true); } /** * Update InstanceConfig of the given resource. The value of field in current config will be * replaced with the value of the same field in given config if it presents. If there is new field * in given config but not in current config, the field will be added into the current config.. * The list fields and map fields will be replaced as a single entry. * The current instanceConfig will be replaced with the given instanceConfig. WARNING: This is not * thread-safe or concurrent updates safe. * * * * @param clusterName * @param instanceName * @param instanceConfig * * @return */ public void updateInstanceConfig(String clusterName, String instanceName, InstanceConfig instanceConfig) { updateInstanceConfig(clusterName, instanceName, instanceConfig, false); } private void updateInstanceConfig(String clusterName, String instanceName, InstanceConfig instanceConfig, boolean overwrite) { if (!ZKUtil.isClusterSetup(clusterName, _zkClient)) { throw new HelixException("fail to setup config. cluster: " + clusterName + " is NOT setup."); } HelixConfigScope scope = new HelixConfigScopeBuilder(ConfigScopeProperty.PARTICIPANT).forCluster(clusterName) .forParticipant(instanceName).build(); String zkPath = scope.getZkPath(); if (!_zkClient.exists(zkPath)) { throw new HelixException( "updateInstanceConfig failed. Given InstanceConfig does not already exist. instance: " + instanceName); } if (overwrite) { ZKUtil.createOrReplace(_zkClient, zkPath, instanceConfig.getRecord(), true); } else { ZKUtil.createOrUpdate(_zkClient, zkPath, instanceConfig.getRecord(), true, true); } } /** * Closes ConfigAccessor: closes the stateful resources including the ZkClient. 
*/ public void close() { if (_zkClient != null && !_usesExternalZkClient) { _zkClient.close(); } } @Override public void finalize() { close(); } public static class Builder extends GenericZkHelixApiBuilder<Builder> { public Builder() { } public ConfigAccessor build() { validate(); return new ConfigAccessor( createZkClient(_realmMode, _realmAwareZkConnectionConfig, _realmAwareZkClientConfig, _zkAddress), false); } } }
apache/jena
35,111
jena-langtag/src/main/java/org/apache/jena/langtag/LangTagRFC5646.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.langtag;

import java.util.HashSet;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;

/**
 * An implementation of parsing and formatting.
 * <a href="https://datatracker.ietf.org/doc/html/rfc5646">RFC 5646</a>
 * <p>
 * This implementation does not replace languages by their preferred form (e.g.
 * "i-klingon" has preferred form of "tlh", "zh-xiang" has a preferred form of "hsn").
 * </p>
 * <p>
 * <a href="https://www.rfc-editor.org/info/rfc5646">RFC 5646: Tags for Identifying Languages</a>
 * </p>
 */
public final class LangTagRFC5646 implements LangTag {

    /** Parse {@code string} as an RFC 5646 language tag; throws via InternalLangTag.error on ill-formed input. */
    public static LangTag create(String string) {
        LangTagRFC5646 langtag = parser(string);
        return langtag;
    }

    // The language tag as given.
    private final String langTagString;

    /* Formatting: https://datatracker.ietf.org/doc/html/rfc5646#section-2.1.1
     *
     * All subtags, including extension and private use subtags,
     * use lowercase letters with two exceptions: two-letter
     * and four-letter subtags that neither appear at the start of the tag
     * nor occur after singletons. Such two-letter subtags are all
     * uppercase (as in the tags "en-CA-x-ca" or "sgn-BE-FR") and four-
     * letter subtags are titlecase (as in the tag "az-Latn-x-latn").
     *
     * See str()
     */

    private final boolean isGrandfathered;
    // Private use of the whole Language-Tag
    private final boolean isPrivateUseLanguage;

    // Start/Finish indexes into langTagString, excluding the initial '-'.
    // -1 means "component absent".
    private final int language0;
    private final int language1;
    private final int script0;
    private final int script1;
    private final int region0;
    private final int region1;
    private final int variant0;
    private final int variant1;
    // All extensions.
    private final int extension0;
    private final int extension1;
    // Private use sub tag (not private use of the whole language tag, which starts "x-").
    private final int privateuse0;
    private final int privateuse1;

    @Override
    public String getLanguage() {
        String x = getSubTag("Language", langTagString, language0, language1, CaseRule.LOWER);
        if ( ! isGrandfathered )
            return x;
        // The general getSubTag code will get these wrong.
        // "sgn-BE-FR", "sgn-BE-NL", "sgn-CH-DE"
        return switch(x) {
            case "sgn-be-fr"->"sgn-BE-FR";
            case "sgn-be-nl"->"sgn-BE-NL";
            case "sgn-ch-de"->"sgn-CH-DE";
            default -> x;
        };
    }

    @Override
    public String getScript() {
        return getSubTag("Script", langTagString, script0, script1, CaseRule.TITLE);
    }

    @Override
    public String getRegion() {
        return getSubTag("Region", langTagString, region0, region1, CaseRule.UPPER);
    }

    @Override
    public String getVariant() {
        return getSubTag("Variant", langTagString, variant0, variant1, CaseRule.LOWER);
    }

    @Override
    public String getExtension() {
        return getSubTag("Extension", langTagString, extension0, extension1, CaseRule.LOWER);
    }

    @Override
    public String getPrivateUse() {
        return getSubTag("Private", langTagString, privateuse0, privateuse1, CaseRule.LOWER);
    }

    // NOTE(review): region0/region1 are not folded into the hash. Equal objects still hash
    // equally because equality requires the same langTagString — confirm the omission is
    // deliberate rather than an oversight.
    @Override
    public int hashCode() {
        return Objects.hash(langTagString, language0, language1, script0, script1,
                            variant0, variant1, extension0, extension1,
                            privateuse0, privateuse1, isGrandfathered, isPrivateUseLanguage);
    }

    /**
     * {@code .equals} and {@code .hashCode}
     * provide "same immutable object" semantics.
     * The language tags are treated case-sensitively.
     *
     * @see LangTagOps.sameLangTagAs for equivalent language tags.
     */
    @Override
    public boolean equals(Object obj) {
        if ( this == obj )
            return true;
        if ( !(obj instanceof LangTagRFC5646 other) )
            return false;
        // All but the string.
        boolean sameParsePoints =
                extension0 == other.extension0 && extension1 == other.extension1
                && isGrandfathered == other.isGrandfathered
                && isPrivateUseLanguage == other.isPrivateUseLanguage
                && language0 == other.language0 && language1 == other.language1
                && privateuse0 == other.privateuse0 && privateuse1 == other.privateuse1
                && region0 == other.region0 && region1 == other.region1
                && script0 == other.script0 && script1 == other.script1
                && variant0 == other.variant0 && variant1 == other.variant1;
        if ( ! sameParsePoints )
            return false;
        return Objects.equals(langTagString, other.langTagString);
    }

    /**
     * Return the lang tag exactly as given.
     * Use {@link #str()} for the language tag formatted by the rules of RFC 5646.
     */
    @Override
    public String toString() { return langTagString; }

    @Override
    public String str() {
        if ( isPrivateUseLanguage )
            return InternalLangTag.lowercase(langTagString);
        String x = irregularFormat(langTagString);
        if ( x != null )
            return x;
        // Format by parts
        // Works for en-GB-oed - the variant is not syntax compatible but the variant formatting rules applies.
        StringBuffer sb = new StringBuffer();
        add(sb, getLanguage());
        add(sb, getScript());
        add(sb, getRegion());
        add(sb, getVariant());
        add(sb, getExtension());
        add(sb, getPrivateUse());
        return sb.toString();
    }

    /** Return a string if there is special formatting for this language tag, else return null */
    private static String irregularFormat(String langTagString) {
        // Some irregular special cases.
        if ( InternalLangTag.caseInsensitivePrefix(langTagString, "sgn-") ) {
            // "sgn-BE-FR", "sgn-BE-NL", "sgn-CH-DE"
            if ( langTagString.equalsIgnoreCase("sgn-BE-FR") ) return "sgn-BE-FR";
            if ( langTagString.equalsIgnoreCase("sgn-BE-NL") ) return "sgn-BE-NL";
            if ( langTagString.equalsIgnoreCase("sgn-CH-DE") ) return "sgn-CH-DE";
        }
        if ( langTagString.startsWith("i-") || langTagString.startsWith("I-") ) {
            String lcLangTagStr = InternalLangTag.lowercase(langTagString);
            if ( irregular_i.contains(lcLangTagStr) )
                return lcLangTagStr;
        }
        return null;
    }

    // Append subtag to sb, separated by '-' (no separator before the first subtag).
    private void add(StringBuffer sb, String subtag) {
        if ( subtag == null )
            return;
        if ( ! sb.isEmpty() )
            sb.append('-');
        sb.append(subtag);
    }

    // Extract the [start, finish) slice and apply the RFC 5646 case convention for the component.
    private static String getSubTag(String label, String string, int start, int finish, CaseRule format) {
        if ( start == -1 )
            return null;
        if ( finish == -1 )
            throw new InternalError(InternalLangTag.titlecase(label)+" start is set but not subtag end: "+string);
        if ( start >= finish )
            throw new InternalError(InternalLangTag.titlecase(label)+" start index is after "+InternalLangTag.lowercase(label)+" end index: "+string);
        String x = string.substring(start, finish);
        return switch(format) {
            case TITLE -> InternalLangTag.titlecase(x);
            case LOWER -> InternalLangTag.lowercase(x);
            case UPPER -> InternalLangTag.uppercase(x);
        };
    }

    private static LangTagRFC5646 parser(String string) {
        LangTagRFC5646 langtag = new Builder().parse(string).build();
        return langtag;
    }

    // Builder helps tidy the code.
    // It allows the LangTagRFC5646 object to have final fields.
    // It means there is one place calling the constructor with its many arguments.
    private static class Builder {
        // All members of LangTagRFC
        String langTagString = null;
        boolean isGrandfathered = false;
        boolean isPrivateUseLanguage = false;
        int language0 = -1;
        int language1 = -1;
        int script0 = -1;
        int script1 = -1;
        int region0 = -1;
        int region1 = -1;
        int variant0 = -1;
        int variant1 = -1;
        int extension0 = -1;
        int extension1 = -1;
        int privateuse0 = -1;
        int privateuse1 = -1;

        Builder() {}

        private Builder parse(String string) {
            final Builder builder = this;
            LangTagRFC5646.parse(builder, string);
            return this;
        }

        private LangTagRFC5646 build() {
            return new LangTagRFC5646(langTagString,
                                      language0, language1, script0, script1,
                                      region0, region1, variant0, variant1,
                                      extension0, extension1, privateuse0, privateuse1,
                                      isGrandfathered, isPrivateUseLanguage);
        }
    }

    // Helpers
    private enum CaseRule { TITLE, LOWER, UPPER }
    private enum CharRange { ALPHA, ALPHANUM }

    // The whole of function 'parse' is enclosed in formatter:off
    // @formatter:off
    static void parse(Builder builder, String string) {
        //
A segment is a sequence of A2ZN characters separated by '-'. builder.langTagString = string; final int N = string.length(); // Language-Tag = langtag ; normal language tags // / privateuse ; private use tag // / grandfathered ; grandfathered tags // langtag = language // ["-" script] // ["-" region] // *("-" variant) // *("-" extension) // ["-" privateuse] // // language = 2*3ALPHA ; shortest ISO 639 code // ["-" extlang] ; sometimes followed by // ; extended language subtags // / 4ALPHA ; or reserved for future use // / 5*8ALPHA ; or registered language subtag // // extlang = 3ALPHA ; selected ISO 639 codes // *2("-" 3ALPHA) ; permanently reserved // // script = 4ALPHA ; ISO 15924 code // // region = 2ALPHA ; ISO 3166-1 code // / 3DIGIT ; UN M.49 code // // variant = 5*8alphanum ; registered variants // / (DIGIT 3alphanum) // // extension = singleton 1*("-" (2*8alphanum)) // // ; Single alphanumerics // ; "x" reserved for private use // singleton = DIGIT ; 0 - 9 // / %x41-57 ; A - W // / %x59-5A ; Y - Z // / %x61-77 ; a - w // / %x79-7A ; y - z // // privateuse = "x" 1*("-" (1*8alphanum)) if ( N == 0 ) InternalLangTag.error("Empty string"); // ------------------- // language = (2*3ALPHA [ extlang ]); shortest ISO 639 code // / 4ALPHA ; reserved for future use // / 5*8ALPHA ; registered language subtag // extlang = 3ALPHA ; selected ISO 639 codes // *2("-" 3ALPHA) ; permanently reserved // Grandfathered // Must check first because the whole string (except "en-GB-oed") is the "language" if ( grandfathered(string) ) { // Regular: // "each tag, in its entirety, represents a language or collection of languages." // // Irregular: // With the exception of "en-GB-oed", which is a // variant of "en-GB", each of them, in its entirety, // represents a language. // builder.language0 = 0; builder.language1 = N; builder.isGrandfathered = true; // Exception. 
if ( string.equalsIgnoreCase("en-GB-oed") ) { // "oed" is "Oxford English Dictionary spelling" // Better is the replacement "en-GB-oxendict" builder.language0 = 0; builder.language1 = 2; builder.region0 = 3; builder.region1 = 5; // Non-standard variant. builder.variant0 = 6; builder.variant1 = N; } return; } // -- language int idx = 0; int idx2 = segmentNextFinish(string, N, idx); int segLen = segmentLength(N, idx, idx2); // Private use in the language position. if ( segLen == 1 ) { if ( string.startsWith("x-") || string.startsWith("X-") ) { /* The primary language subtag is the first subtag in a language tag and cannot be omitted, with two exceptions: o The single-character subtag 'x' as the primary subtag indicates that the language tag consists solely of subtags whose meaning is defined by private agreement. For example, in the tag "x-fr-CH", the subtags 'fr' and 'CH' do not represent the French language or the country of Switzerland (or any other value in the IANA registry) unless there is a private agreement in place to do so. See Section 4.6. */ builder.isPrivateUseLanguage = true; int idxPrivateUseStart = 0; int idxPrivateUseEnd = maybeSubtags(string, N, idxPrivateUseStart+segLen, CharRange.ALPHANUM, 1, 8); builder.privateuse0 = idxPrivateUseStart; builder.privateuse1 = idxPrivateUseEnd; if ( builder.privateuse1 < N ) InternalLangTag.error("Trailing characters in private langtag: '%s'", string.substring(builder.privateuse1)); return; } // else InternalLangTag.error("Language part is 1 character: it must be 2-3 characters (4-8 reserved for future use), \"x-\", or a recognized grandfathered tag"); } if ( segLen > 8 ) InternalLangTag.error("Language too long (2-3 characters, 4-8 reserved for future use)"); if ( idx2 < 0 ) { // language only. 
builder.language0 = 0; builder.language1 = N; InternalLangTag.checkAlpha(string, N, builder.language0, builder.language1); return; } if ( idx == idx2 ) InternalLangTag.error("Can not find the language subtag: '%s'", string); builder.language0 = idx; if ( segLen == 2 || segLen == 3 ) { // -- Language extension subtags // language = 2*3ALPHA ; shortest ISO 639 code // ["-" extlang] // extlang = 3ALPHA ; selected ISO 639 codes // *2("-" 3ALPHA) ; permanently reserved int extStart = idx+segLen; InternalLangTag.checkAlpha(string, N, builder.language0, extStart); // Extensions are 1 to 3 3ALPHA subtags int extEnd = maybeSubtags(string, N, extStart, CharRange.ALPHA, 3, 3); if ( extEnd > extStart ) { idx2 = extEnd; InternalLangTag.checkAlphaMinus(string, N, extStart, builder.language1); } } else if ( segLen >= 4 && segLen <= 8 ) { // / 4ALPHA ; or reserved for future use // / 5*8ALPHA ; or registered language subtag // Dubious. InternalLangTag.checkAlpha(string, N, builder.language0, idx2); } else { InternalLangTag.error("Language too long (2-3 characters, 4-8 reserved for future use)"); } builder.language1 = idx2; // Info noteSegment("language", string, builder.language0, builder.language1); // Move on - next subtag idx = segmentNextStart(N, idx, idx2); idx2 = segmentNextFinish(string, N, idx); segLen = segmentLength(N, idx, idx2); // -- End langtag // ---- script // script = 4ALPHA ; ISO 15924 code if ( segLen == 4 && InternalLangTag.isAlpha(string.charAt(idx)) ) { // Script // Not a digit - which is a variant. // variant = ... / (DIGIT 3alphanum) int start = idx; int finish = idx+segLen; builder.script0 = idx; builder.script1 = idx+segLen; InternalLangTag.checkAlpha(string, N, builder.script0, builder.script1); noteSegment("script", string, builder.script0, builder.script1); // Move on. 
idx = segmentNextStart(N, idx, idx2); idx2 = segmentNextFinish(string, N, idx); segLen = segmentLength(N, idx, idx2); } // -- End script // ---- region // region = 2ALPHA ; ISO 3166-1 code // / 3DIGIT ; UN M.49 code if ( segLen == 2 || segLen == 3 ) { // Region builder.region0 = idx; builder.region1 = idx+segLen; if ( segLen == 2 ) InternalLangTag.checkAlpha(string, N, builder.region0, builder.region1); else InternalLangTag.checkDigits(string, N, builder.region0, builder.region1); noteSegment("region", string, builder.region0, builder.region1); // Move on. idx = segmentNextStart(N, idx, idx2); idx2 = segmentNextFinish(string, N, idx); segLen = segmentLength(N, idx, idx2); } // -- End region // ---- variant // variant = 5*8alphanum ; registered variants // / (DIGIT 3alphanum) for ( ;; ) { if ( segLen >= 5 && segLen <= 8) { // variant 5*8alphanum if ( builder.variant0 == -1 ) builder.variant0 = idx; builder.variant1 = idx+segLen; InternalLangTag.checkAlphaNum(string, N, idx, builder.variant1); noteSegment("variant", string, builder.variant0, builder.variant1); // Move on. idx = segmentNextStart(N, idx, idx2); idx2 = segmentNextFinish(string, N, idx); segLen = segmentLength(N, idx, idx2); continue; } if ( segLen == 4 ) { // variant // DIGIT 3alphanum char ch = string.charAt(idx); if ( ch >= '0' || ch <= '9' ) { if ( builder.variant0 == -1 ) builder.variant0 = idx; builder.variant1 = idx+segLen; InternalLangTag.checkAlphaNum(string, N, idx, builder.variant1); noteSegment("variant", string, builder.variant0, builder.variant1); } // Move on. 
idx = segmentNextStart(N, idx, idx2); idx2 = segmentNextFinish(string, N, idx); segLen = segmentLength(N, idx, idx2); continue; } break; } // -- End variant // ---- extension and private use // extension = singleton 1*("-" (2*8alphanum)) // privateuse = "x" 1*("-" (1*8alphanum)) boolean inPrivateUseSubtag = false; Set<Character> extSingletons = null; new HashSet<>(); while ( segLen == 1 ) { char singleton = string.charAt(idx); if ( singleton == 'x' || singleton == 'X' ) { inPrivateUseSubtag = true; break; } if ( extSingletons == null ) { extSingletons = new HashSet<>(); extSingletons.add(singleton); } else { boolean newEntry = extSingletons.add(singleton); if ( ! newEntry ) InternalLangTag.error("Duplicate extension singleton: '"+singleton+"'"); } if ( builder.extension0 == -1 ) builder.extension0 = idx; // Extension. // 2*8 alphanum int idxExtStart = idx+segLen; int idxEndExtra = maybeSubtags(string, N, idxExtStart, CharRange.ALPHANUM, 2, 8); // Expecting at least one subtag. if ( idxExtStart == idxEndExtra ) InternalLangTag.error("Ill-formed extension"); if ( idxEndExtra > idxExtStart ) idx2 = idxEndExtra; builder.extension1 = idx2; InternalLangTag.checkAlphaNumMinus(string, N, builder.extension0, builder.extension1); noteSegment("extension", string, builder.extension0, builder.extension1); // Move on. idx = segmentNextStart(N, idx, idx2); idx2 = segmentNextFinish(string, N, idx); segLen = segmentLength(N, idx, idx2); if ( segLen == 0 ) InternalLangTag.error("Ill-formed extension. Trailing dash."); } // ---- private use if ( inPrivateUseSubtag ) { builder.privateuse0 = idx; // privateuse = "x" 1*("-" (1*8alphanum)) int idxPrivateUseStart = idx+segLen; int idxPrivateUseEnd = maybeSubtags(string, N, idxPrivateUseStart, CharRange.ALPHANUM, 1, 8); // Expecting at least one subtag. 
if ( idxPrivateUseStart == idxPrivateUseEnd ) InternalLangTag.error("Ill-formed private use component"); if ( idxPrivateUseEnd > idxPrivateUseStart ) idx2 = idxPrivateUseEnd; builder.privateuse1 = idx2; InternalLangTag.checkAlphaNumMinus(string, N, builder.privateuse0, builder.privateuse1); noteSegment("private use", string, builder.privateuse0, builder.privateuse1); // Private use runs to end of string. But do checking. // Move on. idx = segmentNextStart(N, idx, idx2); idx2 = segmentNextFinish(string, N, idx); segLen = segmentLength(N, idx, idx2); if ( segLen == 0 ) InternalLangTag.error("Ill-formed private use subtag. Trailing dash."); } // -- End extension and privateuse // Did we process everything? No segment: idx == -1 idx2 == -1 seglen == -1 if ( idx != -1 && idx < N ) InternalLangTag.error("Trailing characters: '%s'", string.substring(idx)); if ( idx2 >= 0 ) InternalLangTag.error("Bad string: '%s'", string); } // @formatter:on private LangTagRFC5646(String string, int language0, int language1, int script0, int script1, int region0, int region1, int variant0, int variant1, int extension0, int extension1, int privateuse0, int privateuse1, boolean isGrandfathered, boolean isPrivateUseLanguage) { this.langTagString = string; this.isGrandfathered = isGrandfathered; this.isPrivateUseLanguage = isPrivateUseLanguage; this.language0 = language0; this.language1 = language1; this.script0 = script0; this.script1 = script1; this.region0 = region0; this.region1 = region1; this.variant0 = variant0; this.variant1 = variant1; this.extension0 = extension0; this.extension1 = extension1; this.privateuse0 = privateuse0; this.privateuse1 = privateuse1; } /** Zero or more subtags, each between min and max length. */ private static int maybeSubtags(String string, int N, int idxStart, CharRange charRange, int min, int max) { // Looking at the '-' or end of string. 
int numExt = 0; int count = 0; int x = idxStart; // Outer loop - each subtag segment, having read at the "-" while ( x >= 0 && x < N ) { char ch = string.charAt(x); if ( ch != '-' ) break; int x1 = maybeOneSubtag(string, N, x+1, charRange, min, max); if ( x1 <= 0 ) break; if ( x1 == N ) { x = N; break; } x = x1; } return x; } /** * Peek for a segment between min and max in length. * The initial "-" has been read. */ private static int maybeOneSubtag(String string, int N, int idxStart, CharRange charRange, int min, int max) { int idx = idxStart; if ( idx >= N ) return -1; int idx2 = segmentNextFinish(string, N, idx); int segLen = segmentLength(N, idx, idx2); if ( segLen == 0 ) InternalLangTag.error("Bad builder. Found '--'"); if ( segLen < min || segLen > max ) return -1; boolean valid = switch (charRange) { case ALPHA -> InternalLangTag.isAlpha(string, idxStart, idxStart+segLen); case ALPHANUM -> InternalLangTag.isAlphaNum(string, idxStart, idxStart+segLen); }; if ( !valid ) return -1; return idxStart+segLen; } // Start/Finish indexes, excluding the initial '-' private static String getSegment(String string, int x0, int x1) { if ( x0 < 0 && x1 < 0 ) return null; if ( x0 < 0 || x1 < 0 ) { InternalLangTag.error("Segment one undef index"); return null; } return string.substring(x0, x1); } /** Length of a segment, excluding any "-" */ private static int segmentLength(int N, int idx, int idx2) { if ( idx < 0 ) return -1; if ( idx2 < 0 ) return N-idx; return idx2-idx; } /** Index of the start of the next segment. */ private static int segmentNextStart(int N, int idx, int idx2) { if ( idx2 == -1 ) return -1; idx = idx2; // Skip '-' idx++; return idx; } /** Note segment - development aid. 
     */
    private static void noteSegment(String label, String string, int idx, int idx2) {
        // Intentionally inert; uncomment for parser tracing during development.
//        if ( idx2 < 0 ) {
//            System.out.printf("%-10s [%d,%d) '%s'\n", label, idx, idx2, string.substring(idx));
//            return;
//        }
//        System.out.printf("%-10s [%d,%d) '%s'\n",label, idx, idx2, string.substring(idx, idx2));
    }

    /** Return the index of the next '-' or -1 */
    private static int segmentNextFinish(String x, int N, int idx) {
        if ( idx == -1 )
            return -1;
        if ( idx == N )
            return -1;
        for ( ; idx < N ; idx++ ) {
            char ch = x.charAt(idx);
            if ( ch == '-' ) {
                if ( idx == N-1 ) {
                    // The case of "subtag-"
                    InternalLangTag.error("Language tag string ends in '-'");
                }
                return idx;
            }
        }
        return -1;
    }

    // ---

    // RFC 5646: regular tags
    // Grandfathered tags that (appear to) match the 'langtag' production in
    // Figure 1 are considered 'regular' grandfathered tags.  These tags
    // contain one or more subtags that either do not individually appear in
    // the registry or appear but with a different semantic meaning: each
    // tag, in its entirety, represents a language or collection of
    // languages.
    // Case-insensitive membership test.
    // NOTE(review): "grandfathered" already contains every entry of "regular" (see the
    // static initializer below), so the second disjunct is redundant - confirm before removing.
    private static boolean grandfathered(String s) {
        s = s.toLowerCase(Locale.ROOT);
        return grandfathered.contains(s) || regular.contains(s) ;
    }

    // These tags match the 'langtag' production, but their subtags are not extended
    // language or variant subtags: their meaning is defined by their registration and
    // all of these are deprecated in favor of a more modern subtag or sequence of
    // subtags
    private static Set<String> regular =
            Set.of("art-lojban", "cel-gaulish", "no-bok", "no-nyn",
                   "zh-guoyu", "zh-hakka", "zh-min", "zh-min-nan", "zh-xiang");

    // RFC 5646: irregular tags do not match the 'langtag' production and would not be 'well-formed'
    // Grandfathered tags that do not match the 'langtag' production in the
    // ABNF and would otherwise be invalid are considered 'irregular'
    // grandfathered tags. With the exception of "en-GB-oed", which is a
    // variant of "en-GB", each of them, in its entirety, represents a
    // language.
    private static Set<String> irregular =
            Set.of("en-GB-oed",
                   "i-ami", "i-bnn", "i-default", "i-enochian", "i-hak",
                   "i-klingon", "i-lux", "i-mingo", "i-navajo", "i-pwn",
                   "i-tao", "i-tay", "i-tsu",
                   // These are irregular in that they are "primary subtag ("sgn" - sign language)
                   // then two region-like subtags.
                   // They do obey the basic formatting rule - two letters non-primary subtag is uppercase.
                   "sgn-BE-FR", "sgn-BE-NL", "sgn-CH-DE");

    // The "i-" irregulars.
    private static Set<String> irregular_i =
            Set.of("i-ami", "i-bnn", "i-default", "i-enochian", "i-hak",
                   "i-klingon", "i-lux", "i-mingo", "i-navajo", "i-pwn",
                   "i-tao", "i-tay", "i-tsu");

    // ---
    // Lowercased union of "irregular" and "regular", built once at class load for the
    // case-insensitive grandfathered() test.
    private static Set<String> grandfathered = new HashSet<>(2*(regular.size()+irregular.size()));
    static {
        for ( String s : irregular )
            grandfathered.add(InternalLangTag.lowercase(s));
        for ( String s : regular )
            grandfathered.add(InternalLangTag.lowercase(s));
    }

    // @formatter:off
    /* RFC 5646 Section 2.1 ABNF definition:
       https://datatracker.ietf.org/doc/html/rfc5646#section-2.1

   Language-Tag  = langtag             ; normal language tags
                 / privateuse          ; private use tag
                 / grandfathered       ; grandfathered tags

   langtag       = language
                   ["-" script]
                   ["-" region]
                   *("-" variant)
                   *("-" extension)
                   ["-" privateuse]

   language      = 2*3ALPHA            ; shortest ISO 639 code
                   ["-" extlang]       ; sometimes followed by
                                       ; extended language subtags
                 / 4ALPHA              ; or reserved for future use
                 / 5*8ALPHA            ; or registered language subtag

   extlang       = 3ALPHA              ; selected ISO 639 codes
                   *2("-" 3ALPHA)      ; permanently reserved

   script        = 4ALPHA              ; ISO 15924 code

   region        = 2ALPHA              ; ISO 3166-1 code
                 / 3DIGIT              ; UN M.49 code

   variant       = 5*8alphanum         ; registered variants
                 / (DIGIT 3alphanum)

   extension     = singleton 1*("-" (2*8alphanum))

                                       ; Single alphanumerics
                                       ; "x" reserved for private use
   singleton     = DIGIT               ; 0 - 9
                 / %x41-57             ; A - W
                 / %x59-5A             ; Y - Z
                 / %x61-77             ; a - w
                 / %x79-7A             ; y - z

   privateuse    = "x" 1*("-" (1*8alphanum))

   grandfathered = irregular           ; non-redundant tags registered
                 / regular             ; during the RFC 3066 era

   irregular     = "en-GB-oed"         ; irregular tags do not match
                 / "i-ami"             ; the 'langtag' production and
                 / "i-bnn"             ; would not otherwise be
                 / "i-default"         ; considered 'well-formed'
                 / "i-enochian"        ; These tags are all valid,
                 / "i-hak"             ; but most are deprecated
                 / "i-klingon"         ; in favor of more modern
                 / "i-lux"             ; subtags or subtag
                 / "i-mingo"           ; combination
                 / "i-navajo"
                 / "i-pwn"
                 / "i-tao"
                 / "i-tay"
                 / "i-tsu"
                 / "sgn-BE-FR"
                 / "sgn-BE-NL"
                 / "sgn-CH-DE"

   regular       = "art-lojban"        ; these tags match the 'langtag'
                 / "cel-gaulish"       ; production, but their subtags
                 / "no-bok"            ; are not extended language
                 / "no-nyn"            ; or variant subtags: their meaning
                 / "zh-guoyu"          ; is defined by their registration
                 / "zh-hakka"          ; and all of these are deprecated
                 / "zh-min"            ; in favor of a more modern
                 / "zh-min-nan"        ; subtag or sequence of subtags
                 / "zh-xiang"

   alphanum      = (ALPHA / DIGIT)     ; letters and numbers
     */
    // @formatter:on
}
googleapis/google-cloud-java
35,229
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/BulkSetLabelsRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;

/**
 * Request message carrying a set of labels (plus an optional optimistic-concurrency
 * fingerprint) to apply to a Compute Engine resource in bulk.
 *
 * <pre>
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.BulkSetLabelsRequest}
 */
public final class BulkSetLabelsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.BulkSetLabelsRequest)
    BulkSetLabelsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BulkSetLabelsRequest.newBuilder() to construct.
  private BulkSetLabelsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BulkSetLabelsRequest() {
    labelFingerprint_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BulkSetLabelsRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_BulkSetLabelsRequest_descriptor;
  }

  // Reflection hook used by the protobuf runtime to access the `labels` map field
  // (field number 500195327) without going through the typed accessors.
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 500195327:
        return internalGetLabels();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_BulkSetLabelsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.BulkSetLabelsRequest.class,
            com.google.cloud.compute.v1.BulkSetLabelsRequest.Builder.class);
  }

  // Presence bits for optional fields: 0x1 = label_fingerprint is explicitly set.
  private int bitField0_;
  public static final int LABEL_FINGERPRINT_FIELD_NUMBER = 178124825;

  @SuppressWarnings("serial")
  private volatile java.lang.Object labelFingerprint_ = "";

  /**
   *
   *
   * <pre>
   * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
   * </pre>
   *
   * <code>optional string label_fingerprint = 178124825;</code>
   *
   * @return Whether the labelFingerprint field is set.
   */
  @java.lang.Override
  public boolean hasLabelFingerprint() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
   * </pre>
   *
   * <code>optional string label_fingerprint = 178124825;</code>
   *
   * @return The labelFingerprint.
   */
  @java.lang.Override
  public java.lang.String getLabelFingerprint() {
    java.lang.Object ref = labelFingerprint_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field is still the raw ByteString from the wire; decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      labelFingerprint_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
   * </pre>
   *
   * <code>optional string label_fingerprint = 178124825;</code>
   *
   * @return The bytes for labelFingerprint.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLabelFingerprintBytes() {
    java.lang.Object ref = labelFingerprint_;
    if (ref instanceof java.lang.String) {
      // Cached as a String; re-encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      labelFingerprint_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int LABELS_FIELD_NUMBER = 500195327;

  // Lazily-initialized default map entry (string key / string value) shared by all
  // serialization paths for the `labels` map field.
  private static final class LabelsDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.compute.v1.Compute
                .internal_static_google_cloud_compute_v1_BulkSetLabelsRequest_LabelsEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

  private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
    if (labels_ == null) {
      return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
    }
    return labels_;
  }

  public int getLabelsCount() {
    return internalGetLabels().getMap().size();
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public boolean containsLabels(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetLabels().getMap().containsKey(key);
  }

  /** Use {@link #getLabelsMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getLabels() {
    return getLabelsMap();
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
    return internalGetLabels().getMap();
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public /* nullable */ java.lang.String getLabelsOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }

  /**
   *
   *
   * <pre>
   * The labels to set for this resource.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 500195327;</code>
   */
  @java.lang.Override
  public java.lang.String getLabelsOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }

  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 178124825, labelFingerprint_);
    }
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 500195327);
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(178124825, labelFingerprint_);
    }
    // Each map entry is serialized as its own length-delimited message.
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetLabels().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
          LabelsDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(500195327, labels__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.BulkSetLabelsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.BulkSetLabelsRequest other =
        (com.google.cloud.compute.v1.BulkSetLabelsRequest) obj;

    if (hasLabelFingerprint() != other.hasLabelFingerprint()) return false;
    if (hasLabelFingerprint()) {
      if (!getLabelFingerprint().equals(other.getLabelFingerprint())) return false;
    }
    if (!internalGetLabels().equals(other.internalGetLabels())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasLabelFingerprint()) {
      hash = (37 * hash) + LABEL_FINGERPRINT_FIELD_NUMBER;
      hash = (53 * hash) + getLabelFingerprint().hashCode();
    }
    if (!internalGetLabels().getMap().isEmpty()) {
      hash = (37 * hash) + LABELS_FIELD_NUMBER;
      hash = (53 * hash) + internalGetLabels().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.compute.v1.BulkSetLabelsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Mutable builder for {@code google.cloud.compute.v1.BulkSetLabelsRequest}.
   *
   * <pre>
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.BulkSetLabelsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.BulkSetLabelsRequest)
      com.google.cloud.compute.v1.BulkSetLabelsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BulkSetLabelsRequest_descriptor;
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
        int number) {
      switch (number) {
        case 500195327:
          return internalGetLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
        int number) {
      switch (number) {
        case 500195327:
          return internalGetMutableLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BulkSetLabelsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.BulkSetLabelsRequest.class,
              com.google.cloud.compute.v1.BulkSetLabelsRequest.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.BulkSetLabelsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      labelFingerprint_ = "";
      internalGetMutableLabels().clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BulkSetLabelsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.BulkSetLabelsRequest getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.BulkSetLabelsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.BulkSetLabelsRequest build() {
      com.google.cloud.compute.v1.BulkSetLabelsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.BulkSetLabelsRequest buildPartial() {
      com.google.cloud.compute.v1.BulkSetLabelsRequest result =
          new com.google.cloud.compute.v1.BulkSetLabelsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the freshly-built message instance.
    private void buildPartial0(com.google.cloud.compute.v1.BulkSetLabelsRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.labelFingerprint_ = labelFingerprint_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.labels_ = internalGetLabels();
        result.labels_.makeImmutable();
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.BulkSetLabelsRequest) {
        return mergeFrom((com.google.cloud.compute.v1.BulkSetLabelsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.compute.v1.BulkSetLabelsRequest other) {
      if (other == com.google.cloud.compute.v1.BulkSetLabelsRequest.getDefaultInstance())
        return this;
      if (other.hasLabelFingerprint()) {
        labelFingerprint_ = other.labelFingerprint_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      internalGetMutableLabels().mergeFrom(other.internalGetLabels());
      bitField0_ |= 0x00000002;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // 1424998602 = (178124825 << 3) | 2: label_fingerprint, length-delimited.
            case 1424998602:
              {
                labelFingerprint_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 1424998602
            // -293404678 = (500195327 << 3) | 2 as a signed 32-bit int: one `labels` map entry.
            case -293404678:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
                    input.readMessage(
                        LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableLabels()
                    .getMutableMap()
                    .put(labels__.getKey(), labels__.getValue());
                bitField0_ |= 0x00000002;
                break;
              } // case -293404678
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object labelFingerprint_ = "";

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return Whether the labelFingerprint field is set.
     */
    public boolean hasLabelFingerprint() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return The labelFingerprint.
     */
    public java.lang.String getLabelFingerprint() {
      java.lang.Object ref = labelFingerprint_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        labelFingerprint_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return The bytes for labelFingerprint.
     */
    public com.google.protobuf.ByteString getLabelFingerprintBytes() {
      java.lang.Object ref = labelFingerprint_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        labelFingerprint_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @param value The labelFingerprint to set.
     * @return This builder for chaining.
     */
    public Builder setLabelFingerprint(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      labelFingerprint_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearLabelFingerprint() {
      labelFingerprint_ = getDefaultInstance().getLabelFingerprint();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You may optionally provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
     * </pre>
     *
     * <code>optional string label_fingerprint = 178124825;</code>
     *
     * @param value The bytes for labelFingerprint to set.
     * @return This builder for chaining.
     */
    public Builder setLabelFingerprintBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      labelFingerprint_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

    private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
      if (labels_ == null) {
        return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      return labels_;
    }

    // Returns a mutable view of the labels map, copying first if the current
    // MapField is shared/immutable (copy-on-write).
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableLabels() {
      if (labels_ == null) {
        labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      if (!labels_.isMutable()) {
        labels_ = labels_.copy();
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return labels_;
    }

    public int getLabelsCount() {
      return internalGetLabels().getMap().size();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public boolean containsLabels(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetLabels().getMap().containsKey(key);
    }

    /** Use {@link #getLabelsMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getLabels() {
      return getLabelsMap();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
      return internalGetLabels().getMap();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public /* nullable */ java.lang.String getLabelsOrDefault(
        java.lang.String key,
        /* nullable */
        java.lang.String defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    @java.lang.Override
    public java.lang.String getLabelsOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }

    public Builder clearLabels() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableLabels().getMutableMap().clear();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    public Builder removeLabels(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableLabels().getMutableMap().remove(key);
      return this;
    }

    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
      bitField0_ |= 0x00000002;
      return internalGetMutableLabels().getMutableMap();
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    public Builder putLabels(java.lang.String key, java.lang.String value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableLabels().getMutableMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }

    /**
     *
     *
     * <pre>
     * The labels to set for this resource.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 500195327;</code>
     */
    public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
      internalGetMutableLabels().getMutableMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.BulkSetLabelsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.BulkSetLabelsRequest)
  private static final com.google.cloud.compute.v1.BulkSetLabelsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.BulkSetLabelsRequest();
  }

  public static com.google.cloud.compute.v1.BulkSetLabelsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<BulkSetLabelsRequest> PARSER =
      new com.google.protobuf.AbstractParser<BulkSetLabelsRequest>() {
        @java.lang.Override
        public BulkSetLabelsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<BulkSetLabelsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BulkSetLabelsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.BulkSetLabelsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
35,188
java-analytics-admin/proto-google-analytics-admin-v1beta/src/main/java/com/google/analytics/admin/v1beta/UpdateCustomDimensionRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1beta/analytics_admin.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.admin.v1beta; /** * * * <pre> * Request message for UpdateCustomDimension RPC. * </pre> * * Protobuf type {@code google.analytics.admin.v1beta.UpdateCustomDimensionRequest} */ public final class UpdateCustomDimensionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.UpdateCustomDimensionRequest) UpdateCustomDimensionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateCustomDimensionRequest.newBuilder() to construct. 
private UpdateCustomDimensionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateCustomDimensionRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateCustomDimensionRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateCustomDimensionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateCustomDimensionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest.class, com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest.Builder.class); } private int bitField0_; public static final int CUSTOM_DIMENSION_FIELD_NUMBER = 1; private com.google.analytics.admin.v1beta.CustomDimension customDimension_; /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> * * @return Whether the customDimension field is set. */ @java.lang.Override public boolean hasCustomDimension() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> * * @return The customDimension. */ @java.lang.Override public com.google.analytics.admin.v1beta.CustomDimension getCustomDimension() { return customDimension_ == null ? 
com.google.analytics.admin.v1beta.CustomDimension.getDefaultInstance() : customDimension_; } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ @java.lang.Override public com.google.analytics.admin.v1beta.CustomDimensionOrBuilder getCustomDimensionOrBuilder() { return customDimension_ == null ? com.google.analytics.admin.v1beta.CustomDimension.getDefaultInstance() : customDimension_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCustomDimension()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCustomDimension()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest)) { return super.equals(obj); } com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest other = (com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest) obj; if (hasCustomDimension() != other.hasCustomDimension()) return false; if (hasCustomDimension()) { if (!getCustomDimension().equals(other.getCustomDimension())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int 
hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCustomDimension()) { hash = (37 * hash) + CUSTOM_DIMENSION_FIELD_NUMBER; hash = (53 * hash) + getCustomDimension().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( java.io.InputStream 
input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for UpdateCustomDimension RPC. * </pre> * * Protobuf type {@code google.analytics.admin.v1beta.UpdateCustomDimensionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.UpdateCustomDimensionRequest) com.google.analytics.admin.v1beta.UpdateCustomDimensionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateCustomDimensionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateCustomDimensionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest.class, com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest.Builder.class); } // Construct using com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCustomDimensionFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; customDimension_ = null; if (customDimensionBuilder_ != null) { 
customDimensionBuilder_.dispose(); customDimensionBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateCustomDimensionRequest_descriptor; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest getDefaultInstanceForType() { return com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest.getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest build() { com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest buildPartial() { com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest result = new com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.customDimension_ = customDimensionBuilder_ == null ? customDimension_ : customDimensionBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest) { return mergeFrom((com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest other) { if (other == com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest.getDefaultInstance()) return this; if (other.hasCustomDimension()) { mergeCustomDimension(other.getCustomDimension()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCustomDimensionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.analytics.admin.v1beta.CustomDimension customDimension_; private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.CustomDimension, com.google.analytics.admin.v1beta.CustomDimension.Builder, com.google.analytics.admin.v1beta.CustomDimensionOrBuilder> customDimensionBuilder_; /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> * * @return Whether the customDimension field is set. */ public boolean hasCustomDimension() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> * * @return The customDimension. */ public com.google.analytics.admin.v1beta.CustomDimension getCustomDimension() { if (customDimensionBuilder_ == null) { return customDimension_ == null ? 
com.google.analytics.admin.v1beta.CustomDimension.getDefaultInstance() : customDimension_; } else { return customDimensionBuilder_.getMessage(); } } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ public Builder setCustomDimension(com.google.analytics.admin.v1beta.CustomDimension value) { if (customDimensionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } customDimension_ = value; } else { customDimensionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ public Builder setCustomDimension( com.google.analytics.admin.v1beta.CustomDimension.Builder builderForValue) { if (customDimensionBuilder_ == null) { customDimension_ = builderForValue.build(); } else { customDimensionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ public Builder mergeCustomDimension(com.google.analytics.admin.v1beta.CustomDimension value) { if (customDimensionBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && customDimension_ != null && customDimension_ != com.google.analytics.admin.v1beta.CustomDimension.getDefaultInstance()) { getCustomDimensionBuilder().mergeFrom(value); } else { customDimension_ = value; } } else { customDimensionBuilder_.mergeFrom(value); } if (customDimension_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ public Builder clearCustomDimension() { bitField0_ = (bitField0_ & ~0x00000001); customDimension_ = 
null; if (customDimensionBuilder_ != null) { customDimensionBuilder_.dispose(); customDimensionBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ public com.google.analytics.admin.v1beta.CustomDimension.Builder getCustomDimensionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCustomDimensionFieldBuilder().getBuilder(); } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ public com.google.analytics.admin.v1beta.CustomDimensionOrBuilder getCustomDimensionOrBuilder() { if (customDimensionBuilder_ != null) { return customDimensionBuilder_.getMessageOrBuilder(); } else { return customDimension_ == null ? com.google.analytics.admin.v1beta.CustomDimension.getDefaultInstance() : customDimension_; } } /** * * * <pre> * The CustomDimension to update * </pre> * * <code>.google.analytics.admin.v1beta.CustomDimension custom_dimension = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.CustomDimension, com.google.analytics.admin.v1beta.CustomDimension.Builder, com.google.analytics.admin.v1beta.CustomDimensionOrBuilder> getCustomDimensionFieldBuilder() { if (customDimensionBuilder_ == null) { customDimensionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.CustomDimension, com.google.analytics.admin.v1beta.CustomDimension.Builder, com.google.analytics.admin.v1beta.CustomDimensionOrBuilder>( getCustomDimension(), getParentForChildren(), isClean()); customDimension_ = null; } return customDimensionBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * 
<pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. To replace the entire entity, use one path with the string "*" to * match all fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.UpdateCustomDimensionRequest) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.UpdateCustomDimensionRequest) private static final com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest(); } public static com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateCustomDimensionRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateCustomDimensionRequest>() { @java.lang.Override public UpdateCustomDimensionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateCustomDimensionRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateCustomDimensionRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateCustomDimensionRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,195
java-analyticshub/proto-google-cloud-analyticshub-v1/src/main/java/com/google/cloud/bigquery/analyticshub/v1/UpdateListingRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/analyticshub/v1/analyticshub.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.analyticshub.v1; /** * * * <pre> * Message for updating a Listing. * </pre> * * Protobuf type {@code google.cloud.bigquery.analyticshub.v1.UpdateListingRequest} */ public final class UpdateListingRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.analyticshub.v1.UpdateListingRequest) UpdateListingRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateListingRequest.newBuilder() to construct. 
private UpdateListingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateListingRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateListingRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateListingRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateListingRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest.class, com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. 
*/ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int LISTING_FIELD_NUMBER = 2; private com.google.cloud.bigquery.analyticshub.v1.Listing listing_; /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the listing field is set. */ @java.lang.Override public boolean hasListing() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The listing. */ @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.Listing getListing() { return listing_ == null ? com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance() : listing_; } /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder getListingOrBuilder() { return listing_ == null ? 
com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance() : listing_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getListing()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getListing()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest)) { return super.equals(obj); } com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest other = (com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasListing() != other.hasListing()) return false; if (hasListing()) { if (!getListing().equals(other.getListing())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasListing()) { hash = (37 * hash) + LISTING_FIELD_NUMBER; hash = (53 * hash) + getListing().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for updating a Listing. * </pre> * * Protobuf type {@code google.cloud.bigquery.analyticshub.v1.UpdateListingRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.analyticshub.v1.UpdateListingRequest) com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateListingRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateListingRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest.class, com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest.Builder.class); } // Construct using com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getListingFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); 
updateMaskBuilder_ = null; } listing_ = null; if (listingBuilder_ != null) { listingBuilder_.dispose(); listingBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateListingRequest_descriptor; } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest getDefaultInstanceForType() { return com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest build() { com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest buildPartial() { com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest result = new com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.listing_ = listingBuilder_ == null ? 
listing_ : listingBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest) { return mergeFrom((com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest other) { if (other == com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasListing()) { mergeListing(other.getListing()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry 
== null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getListingFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Field mask specifies the fields to update in the listing * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.bigquery.analyticshub.v1.Listing listing_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.Listing, com.google.cloud.bigquery.analyticshub.v1.Listing.Builder, com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder> listingBuilder_; /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the listing field is set. */ public boolean hasListing() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The listing to update. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The listing. */ public com.google.cloud.bigquery.analyticshub.v1.Listing getListing() { if (listingBuilder_ == null) { return listing_ == null ? com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance() : listing_; } else { return listingBuilder_.getMessage(); } } /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setListing(com.google.cloud.bigquery.analyticshub.v1.Listing value) { if (listingBuilder_ == null) { if (value == null) { throw new NullPointerException(); } listing_ = value; } else { listingBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setListing( com.google.cloud.bigquery.analyticshub.v1.Listing.Builder builderForValue) { if (listingBuilder_ == null) { listing_ = builderForValue.build(); } else { listingBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The listing to update. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeListing(com.google.cloud.bigquery.analyticshub.v1.Listing value) { if (listingBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && listing_ != null && listing_ != com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance()) { getListingBuilder().mergeFrom(value); } else { listing_ = value; } } else { listingBuilder_.mergeFrom(value); } if (listing_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearListing() { bitField0_ = (bitField0_ & ~0x00000002); listing_ = null; if (listingBuilder_ != null) { listingBuilder_.dispose(); listingBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.analyticshub.v1.Listing.Builder getListingBuilder() { bitField0_ |= 0x00000002; onChanged(); return getListingFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The listing to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder getListingOrBuilder() { if (listingBuilder_ != null) { return listingBuilder_.getMessageOrBuilder(); } else { return listing_ == null ? com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance() : listing_; } } /** * * * <pre> * Required. The listing to update. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.Listing, com.google.cloud.bigquery.analyticshub.v1.Listing.Builder, com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder> getListingFieldBuilder() { if (listingBuilder_ == null) { listingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.Listing, com.google.cloud.bigquery.analyticshub.v1.Listing.Builder, com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder>( getListing(), getParentForChildren(), isClean()); listing_ = null; } return listingBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.analyticshub.v1.UpdateListingRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.analyticshub.v1.UpdateListingRequest) private static final com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest(); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateListingRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateListingRequest>() { @java.lang.Override public UpdateListingRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder 
builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateListingRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateListingRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateListingRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/felix-dev
35,337
http/jetty/src/main/java/org/apache/felix/http/jetty/internal/JettyService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.felix.http.jetty.internal; import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.util.ArrayList; import java.util.Collections; import java.util.Dictionary; import java.util.Enumeration; import java.util.Hashtable; import java.util.List; import org.apache.felix.http.base.internal.HttpServiceController; import org.apache.felix.http.base.internal.logger.SystemLogger; import org.eclipse.jetty.alpn.server.ALPNServerConnectionFactory; import org.eclipse.jetty.http.HttpVersion; import org.eclipse.jetty.http.UriCompliance; import org.eclipse.jetty.http2.server.HTTP2ServerConnectionFactory; import org.eclipse.jetty.io.ConnectionStatistics; import org.eclipse.jetty.security.HashLoginService; import org.eclipse.jetty.security.UserStore; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.SecureRequestCustomizer; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.SslConnectionFactory; 
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.StatisticsHandler;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.server.session.HouseKeeper;
import org.eclipse.jetty.server.session.SessionHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceFactory;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.http.runtime.HttpServiceRuntimeConstants;

import jakarta.servlet.SessionCookieConfig;
import jakarta.servlet.SessionTrackingMode;

/**
 * Owns the embedded Jetty {@link Server} instance for the Felix HTTP service.
 * <p>
 * Responsibilities visible in this class: building the server and its servlet
 * context from {@link JettyConfig}, creating HTTP/HTTPS (and optionally HTTP/2)
 * connectors, wiring the dispatcher servlet from {@link HttpServiceController},
 * opening/closing the various whiteboard trackers (connectors, request logs,
 * load-balancer customizers, MBean server), and reacting to Configuration Admin
 * updates via {@link #updated(Dictionary)}.
 * <p>
 * Thread-safety: mutable server state is held in {@code volatile} fields and
 * start/stop transitions happen through {@link #startJetty()} /
 * {@link #stopJetty()}; the class itself performs no additional locking, so
 * callers are expected to serialize lifecycle calls (Config Admin does this).
 */
public final class JettyService
{
    /** PID for configuration of the HTTP service. */
    public static final String PID = "org.apache.felix.http";

    /** Immutable view of the current configuration; mutated in place via {@code config.update(...)}. */
    private final JettyConfig config;

    /** The bundle context of the Felix HTTP bundle. */
    private final BundleContext context;

    /** Bridge to the HTTP service / whiteboard implementation (dispatcher servlet, listeners). */
    private final HttpServiceController controller;

    /** Registration of the Config Admin ManagedService facade; {@code null} when not registered. */
    private volatile ServiceRegistration<?> configServiceReg;

    /** The running Jetty server; {@code null} while stopped. */
    private volatile Server server;

    /** Root handler collection the servlet context is attached to. */
    private volatile ContextHandlerCollection parent;

    /** Tracks an MBeanServer to export Jetty MBeans; only open when MBean registration is enabled. */
    private volatile MBeanServerTracker mbeanServerTracker;

    /** Tracks ConnectorFactory services contributing extra connectors. */
    private volatile ConnectorFactoryTracker connectorTracker;

    /** Tracks RequestLog whiteboard services; installed as the server's request log. */
    private volatile RequestLogTracker requestLogTracker;

    /** Optional request log that forwards to the OSGi Log Service. */
    private volatile LogServiceRequestLog osgiRequestLog;

    /** Optional request log writing to a file configured via the request-log path property. */
    private volatile FileRequestLog fileRequestLog;

    /** Tracks load-balancer customizer factories feeding {@link #customizerWrapper}. */
    private volatile LoadBalancerCustomizerFactoryTracker loadBalancerCustomizerTracker;

    /** Mutable holder installed as an HttpConfiguration customizer when proxy/LB support is on. */
    private volatile CustomizerWrapper customizerWrapper;

    /** Whether this instance registers the singleton ManagedService (PID {@link #PID}). */
    private final boolean registerManagedService;

    /** Jetty version string reported in the startup log message. */
    private final String jettyVersion;

    /** When {@code true}, Jetty is started in {@link #start()} instead of waiting for configuration. */
    private final boolean immediatelyStartJetty;

    /**
     * Shared constructor for JettyService instances.
     * @param context The bundle context
     * @param controller The HTTP service controller
     * @param registerManagedService Whether to register the managed service
     */
    private JettyService(final BundleContext context,
            final HttpServiceController controller,
            final boolean registerManagedService)
    {
        // Resolve the real Jetty version first; also sets the "jetty.version" system property.
        this.jettyVersion = fixJettyVersion(context);

        this.context = context;
        this.config = new JettyConfig(this.context);
        this.controller = controller;
        this.registerManagedService = registerManagedService;
        // Factory-configured instances (registerManagedService == false) always start immediately;
        // the singleton instance may defer startup until a configuration arrives.
        this.immediatelyStartJetty = !registerManagedService || !this.config.isRequireConfiguration();
    }

    /**
     * Constructor for the managed service jetty service.
     * @param context The bundle context
     * @param controller The HTTP service controller
     */
    public JettyService(final BundleContext context,
            final HttpServiceController controller)
    {
        this(context, controller, true);
    }

    /**
     * Constructor for the managed service factory jetty service.
     * @param context The bundle context
     * @param controller The HTTP service controller
     * @param props The configuration properties
     */
    public JettyService(final BundleContext context,
            final HttpServiceController controller,
            final Dictionary<String,?> props)
    {
        this(context, controller, false);
        this.config.update(props);
    }

    /**
     * Starts the service: brings Jetty up (unless startup is deferred until a
     * configuration arrives) and, for the singleton instance, registers the
     * Config Admin ManagedService under PID {@link #PID}.
     *
     * @throws Exception propagated from Jetty startup
     */
    public void start() throws Exception
    {
        if ( this.immediatelyStartJetty)
        {
            // FELIX-4422: start Jetty synchronously...
            startJetty();
        }

        if (this.registerManagedService)
        {
            final Dictionary<String, Object> props = new Hashtable<>();
            props.put(Constants.SERVICE_PID, PID);
            props.put(Constants.SERVICE_VENDOR, "The Apache Software Foundation");
            props.put(Constants.SERVICE_DESCRIPTION, "Managed Service for the Jetty Http Service");

            // Registered by class name (not Class object) so the bundle has no hard
            // dependency on the Config Admin API; a ServiceFactory defers creating the
            // JettyManagedService until Config Admin actually fetches it.
            this.configServiceReg = this.context.registerService("org.osgi.service.cm.ManagedService",
                    new ServiceFactory()
                    {
                        @Override
                        public Object getService(final Bundle bundle, final ServiceRegistration registration)
                        {
                            return new JettyManagedService(JettyService.this);
                        }

                        @Override
                        public void ungetService(Bundle bundle, ServiceRegistration registration, Object service)
                        {
                            // nothing to do
                        }
                    }, props);
        }
    }

    /**
     * Stops the service: unregisters the ManagedService (if any) and shuts
     * Jetty down synchronously.
     *
     * @throws Exception propagated from Jetty shutdown
     */
    public void stop() throws Exception
    {
        if (this.configServiceReg != null)
        {
            try
            {
                // ignore potential exception on shutdown
                this.configServiceReg.unregister();
            }
            catch (final IllegalStateException e)
            {
                // ignore
            }
            this.configServiceReg = null;
        }

        // FELIX-4422: stop Jetty synchronously...
        stopJetty();
    }

    /**
     * Builds the service registration properties for the HttpServiceRuntime,
     * including the resolved endpoint URLs of all active connectors.
     *
     * @return freshly populated properties table
     */
    private Hashtable<String, Object> getServiceProperties()
    {
        Hashtable<String, Object> props = new Hashtable<>();
        // Add some important configuration properties...
        this.config.setServiceProperties(props);
        addEndpointProperties(props, null);

        // propagate the new service properties to the actual HTTP service...
        return props;
    }

    /**
     * Configuration Admin callback. A {@code null} dictionary on a deferred-start
     * singleton stops Jetty; any effective property change restarts it.
     *
     * @param props the new configuration, or {@code null} when deleted
     */
    public void updated(final Dictionary<String, ?> props)
    {
        final boolean changed = this.config.update(props);
        if (props == null && !this.immediatelyStartJetty)
        {
            // null is only passed for the managed service
            stopJetty();
        }
        else if (changed)
        {
            // Something changed in our configuration, restart Jetty...
            stopJetty();
            startJetty();
        }
    }

    /**
     * Starts Jetty, logging (rather than propagating) any startup failure so a
     * bad configuration cannot take down the caller.
     */
    private void startJetty()
    {
        try
        {
            initializeJetty();
        }
        catch (Exception e)
        {
            SystemLogger.LOGGER.error("Exception while initializing Jetty", e);
        }
    }

    /**
     * Stops Jetty and tears down all dependent resources. Teardown order matters:
     * the dispatcher is deactivated/unregistered first so no new requests are
     * accepted, then request logs and trackers are closed, then the server itself
     * is stopped; the MBean tracker is closed last. Safe to call when already stopped.
     */
    private void stopJetty()
    {
        if (this.server != null)
        {
            this.controller.getEventDispatcher().setActive(false);
            this.controller.unregister();

            if (this.fileRequestLog != null)
            {
                this.fileRequestLog.stop();
                this.fileRequestLog = null;
            }
            if (this.osgiRequestLog != null)
            {
                this.osgiRequestLog.unregister();
                this.osgiRequestLog = null;
            }
            if (this.requestLogTracker != null)
            {
                this.requestLogTracker.close();
                this.requestLogTracker = null;
            }

            if (this.connectorTracker != null)
            {
                this.connectorTracker.close();
                this.connectorTracker = null;
            }

            if (this.loadBalancerCustomizerTracker != null)
            {
                this.loadBalancerCustomizerTracker.close();
                this.loadBalancerCustomizerTracker = null;
            }

            try
            {
                this.server.stop();
                this.server = null;
                SystemLogger.LOGGER.info("Stopped Jetty");
            }
            catch (Exception e)
            {
                SystemLogger.LOGGER.error("Exception while stopping Jetty", e);
            }

            if (this.mbeanServerTracker != null)
            {
                this.mbeanServerTracker.close();
                this.mbeanServerTracker = null;
            }
        }
    }

    /**
     * Creates and starts the Jetty server from the current configuration:
     * thread pool, login service, servlet context + dispatcher servlet, optional
     * statistics/gzip handlers, optional websocket support, then the connectors.
     * Does nothing (logs a warning) when both HTTP and HTTPS are disabled.
     *
     * @throws Exception propagated from Jetty startup
     */
    private void initializeJetty() throws Exception
    {
        if (this.config.isUseHttp() || this.config.isUseHttps())
        {
            final int threadPoolMax = this.config.getThreadPoolMax();
            if (threadPoolMax >= 0)
            {
                this.server = new Server( new QueuedThreadPool(threadPoolMax) );
            }
            else
            {
                this.server = new Server();
            }

            // FELIX-5931 : PropertyUserStore used as default by HashLoginService has changed in 9.4.12.v20180830
            // and fails without a config, therefore using plain UserStore
            final HashLoginService loginService = new HashLoginService("OSGi HTTP Service Realm");
            loginService.setUserStore(new UserStore());
            this.server.addBean(loginService);

            this.parent = new ContextHandlerCollection();

            ServletContextHandler context = new ServletContextHandler(this.parent,
                    this.config.getContextPath(), ServletContextHandler.SESSIONS);

            configureSessionManager(context);
            this.controller.getEventDispatcher().setActive(true);
            context.addEventListener(controller.getEventDispatcher());
            context.getSessionHandler().addEventListener(controller.getEventDispatcher());

            // The single dispatcher servlet routes all requests into the Felix HTTP service.
            final ServletHolder holder = new ServletHolder(this.controller.createDispatcherServlet());
            holder.setAsyncSupported(true);
            context.addServlet(holder, "/*");
            context.setMaxFormContentSize(this.config.getMaxFormSize());

            if (this.config.isRegisterMBeans())
            {
                this.mbeanServerTracker = new MBeanServerTracker(this.context, this.server);
                this.mbeanServerTracker.open();
                // NOTE(review): this adds a StatisticsHandler bean only when the statistics
                // handler is DISABLED; the enabled case is handled below. The negation looks
                // inverted — confirm the intended MBean wiring.
                if (!this.config.isStatisticsHandlerEnabled())
                {
                    context.addBean(new StatisticsHandler());
                }
            }

            this.server.setHandler(this.parent);

            if (this.config.isStatisticsHandlerEnabled())
            {
                StatisticsHandler statisticsHandler = new StatisticsHandler();
                this.server.insertHandler(statisticsHandler);
                if (this.config.isRegisterMBeans())
                {
                    context.addBean(statisticsHandler);
                }
            }

            if (this.config.isGzipHandlerEnabled())
            {
                GzipHandler gzipHandler = new GzipHandler();
                gzipHandler.setMinGzipSize(this.config.getGzipMinGzipSize());
                gzipHandler.setInflateBufferSize(this.config.getGzipInflateBufferSize());
                gzipHandler.setSyncFlush(this.config.isGzipSyncFlush());
                gzipHandler.addIncludedMethods(this.config.getGzipIncludedMethods());
                gzipHandler.addExcludedMethods(this.config.getGzipExcludedMethods());
                gzipHandler.addIncludedPaths(this.config.getGzipIncludedPaths());
                gzipHandler.addExcludedPaths(this.config.getGzipExcludedPaths());
                gzipHandler.addIncludedMimeTypes(this.config.getGzipIncludedMimeTypes());
                gzipHandler.addExcludedMimeTypes(this.config.getGzipExcludedMimeTypes());
                this.server.insertHandler(gzipHandler);
            }

            if(this.config.getStopTimeout() != -1)
            {
                this.server.setStopTimeout(this.config.getStopTimeout());
            }

            // Websocket support is optional: each initializer only runs when its
            // implementation bundle is resolvable (see isClassNameVisible).
            if (this.config.isUseJettyWebsocket())
            {
                maybeInitializeJettyWebsocket(context);
            }

            if (this.config.isUseJakartaWebsocket())
            {
                maybeInitializeJakartaWebsocket(context);
            }

            this.server.start();

            maybeStoreWebSocketContainerAttributes(context);

            // session id manager is only available after server is started
            context.getSessionHandler().getSessionIdManager().getSessionHouseKeeper().setIntervalSec(
                    this.config.getLongProperty(JettyConfig.FELIX_JETTY_SESSION_SCAVENGING_INTERVAL,
                            HouseKeeper.DEFAULT_PERIOD_MS / 1000L));

            if (this.config.isProxyLoadBalancerConnection())
            {
                customizerWrapper = new CustomizerWrapper();
                this.loadBalancerCustomizerTracker = new LoadBalancerCustomizerFactoryTracker(this.context,
                        customizerWrapper);
                this.loadBalancerCustomizerTracker.open();
            }

            final StringBuilder message = new StringBuilder("Started Jetty ").append(this.jettyVersion)
                    .append(" at port(s)");

            // Connectors are created after server.start(); initializeHttp/initializeHttps
            // return false when the connector fails to start, which keeps it out of the log line.
            if (this.config.isUseHttp() && initializeHttp())
            {
                message.append(" HTTP:").append(this.config.getHttpPort());
            }

            if (this.config.isUseHttps() && initializeHttps())
            {
                message.append(" HTTPS:").append(this.config.getHttpsPort());
            }

            this.connectorTracker = new ConnectorFactoryTracker(this.context, this.server);
            this.connectorTracker.open();

            if (this.server.getConnectors() != null && this.server.getConnectors().length > 0)
            {
                message.append(" on context path ").append(this.config.getContextPath());

                message.append(" [");
                ThreadPool threadPool = this.server.getThreadPool();
                if (threadPool instanceof ThreadPool.SizedThreadPool)
                {
                    ThreadPool.SizedThreadPool sizedThreadPool = (ThreadPool.SizedThreadPool) threadPool;
                    message.append("minThreads=").append(sizedThreadPool.getMinThreads()).append(",");
                    message.append("maxThreads=").append(sizedThreadPool.getMaxThreads()).append(",");
                }

                Connector connector = this.server.getConnectors()[0];
                if (connector instanceof ServerConnector)
                {
                    @SuppressWarnings("resource")
                    ServerConnector serverConnector = (ServerConnector) connector;
                    message.append("acceptors=").append(serverConnector.getAcceptors()).append(",");
                    message.append("selectors=").append(serverConnector.getSelectorManager().getSelectorCount());
                }
                message.append("]");

                SystemLogger.LOGGER.info(message.toString());
                this.controller.register(context.getServletContext(), getServiceProperties());
            }
            else
            {
                this.stopJetty();
                SystemLogger.LOGGER.error("Jetty stopped (no connectors available)");
            }

            // NOTE(review): when the branch above took the no-connectors path, stopJetty()
            // has set this.server to null, yet execution falls through to
            // this.server.setRequestLog(...) below — a potential NPE. Confirm whether this
            // section should be guarded by (this.server != null) or moved into the success branch.
            try
            {
                this.requestLogTracker = new RequestLogTracker(this.context, this.config.getRequestLogFilter());
                this.requestLogTracker.open();
                this.server.setRequestLog(requestLogTracker);
            }
            catch (InvalidSyntaxException e)
            {
                SystemLogger.LOGGER.error("Invalid filter syntax in request log tracker", e);
            }

            if (this.config.isRequestLogOSGiEnabled())
            {
                this.osgiRequestLog = new LogServiceRequestLog(this.config);
                this.osgiRequestLog.register(this.context);
                SystemLogger.LOGGER.info("Directing Jetty request logs to the OSGi Log Service");
            }

            if (this.config.getRequestLogFilePath() != null && !this.config.getRequestLogFilePath().isEmpty())
            {
                this.fileRequestLog = new FileRequestLog(config);
                this.fileRequestLog.start(this.context);
                SystemLogger.LOGGER.info("Directing Jetty request logs to {}",
                        this.config.getRequestLogFilePath());
            }
        }
        else
        {
            SystemLogger.LOGGER.warn("Jetty not started (HTTP and HTTPS disabled)");
        }
    }

    /**
     * Resolves the real Jetty version and publishes it as the "jetty.version"
     * system property when the bundle manifest carries an X-Jetty-Version header.
     *
     * @param ctx the bundle context used to read the manifest headers
     * @return the Jetty version string to report
     */
    private static String fixJettyVersion(final BundleContext ctx)
    {
        // FELIX-4311: report the real version of Jetty...
        final Dictionary<String, String> headers = ctx.getBundle().getHeaders();
        String version = headers.get("X-Jetty-Version");
        if (version != null)
        {
            System.setProperty("jetty.version", version);
        }
        else
        {
            version = Server.getVersion();
        }
        return version;
    }

    /**
     * Creates, configures and starts the plain HTTP connector.
     *
     * @return {@code true} when the connector started successfully
     */
    private boolean initializeHttp()
    {
        HttpConnectionFactory connFactory = new HttpConnectionFactory();
        configureHttpConnectionFactory(connFactory);

        ServerConnector connector = new ServerConnector(
                server,
                config.getAcceptors(),
                config.getSelectors(),
                connFactory
        );

        configureConnector(connector, this.config.getHttpPort());
        if (this.config.isProxyLoadBalancerConnection())
        {
            connFactory.getHttpConfiguration().addCustomizer(customizerWrapper);
        }
        return startConnector(connector);
    }

    /**
     * Creates, configures and starts the HTTPS connector, optionally layering
     * ALPN + HTTP/2 connection factories on top of the TLS factory.
     *
     * @return {@code true} when the connector started successfully
     */
    private boolean initializeHttps()
    {
        HttpConnectionFactory connFactory = new HttpConnectionFactory();
        configureHttpConnectionFactory(connFactory);

        SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
        configureSslContextFactory(sslContextFactory);

        ServerConnector connector = new ServerConnector(
                server,
                config.getAcceptors(),
                config.getSelectors(),
                new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.toString()),
                connFactory
        );

        HttpConfiguration httpConfiguration = connFactory.getHttpConfiguration();
        httpConfiguration.addCustomizer(new SecureRequestCustomizer());

        if (this.config.isProxyLoadBalancerConnection())
        {
            httpConfiguration.addCustomizer(customizerWrapper);
        }

        if (this.config.isUseHttp2())
        {
            //add ALPN factory
            SslConnectionFactory alpnConnFactory = new SslConnectionFactory(sslContextFactory, "alpn");
            connector.addConnectionFactory(alpnConnFactory);

            ALPNServerConnectionFactory alpn = new ALPNServerConnectionFactory(this.config.getAlpnProtocols());
            alpn.setDefaultProtocol(this.config.getAlpnDefaultProtocol());
            connector.addConnectionFactory(alpn);

            //Configure a HTTP2 on the ssl connector
            HTTP2ServerConnectionFactory http2factory = new HTTP2ServerConnectionFactory(httpConfiguration);
            http2factory.setMaxConcurrentStreams(this.config.getHttp2MaxConcurrentStreams());
            http2factory.setInitialStreamRecvWindow(this.config.getHttp2InitialStreamRecvWindow());
            http2factory.setInitialSessionRecvWindow(this.config.getHttp2InitialSessionRecvWindow());
            connector.addConnectionFactory(http2factory);

            //use http/2 cipher comparator
            sslContextFactory.setCipherComparator(org.eclipse.jetty.http2.HTTP2Cipher.COMPARATOR);
            sslContextFactory.setUseCipherSuitesOrder(true);
        }

        configureConnector(connector, this.config.getHttpsPort());

        return startConnector(connector);
    }

    /**
     * Initialize the jakarta websocket support for the servlet context handler.
     * If the optional initializer class is not present then a warning will be logged.
     *
     * @param handler the sevlet context handler to initialize
     */
    private void maybeInitializeJakartaWebsocket(ServletContextHandler handler)
    {
        if (isClassNameVisible("org.eclipse.jetty.websocket.jakarta.server.config.JakartaWebSocketServletContainerInitializer"))
        {
            // Ensure that JavaxWebSocketServletContainerInitializer is initialized,
            // to setup the ServerContainer for this web application context.
            org.eclipse.jetty.websocket.jakarta.server.config.JakartaWebSocketServletContainerInitializer.configure(handler, null);
        }
        else
        {
            SystemLogger.LOGGER.warn("Failed to initialize jakarta standard websocket support since the initializer class was not found. "
                    + "Check if the websocket-jakarta-server bundle is deployed.");
        }
    }

    /**
     * Initialize the jetty websocket support for the servlet context handler.
     * If the optional initializer class is not present then a warning will be logged.
     *
     * @param handler the sevlet context handler to initialize
     */
    private void maybeInitializeJettyWebsocket(ServletContextHandler handler)
    {
        if (isClassNameVisible("org.eclipse.jetty.websocket.server.config.JettyWebSocketServletContainerInitializer"))
        {
            // Ensure that JettyWebSocketServletContainerInitializer is initialized,
            // to setup the JettyWebSocketServerContainer for this web application context.
            org.eclipse.jetty.websocket.server.config.JettyWebSocketServletContainerInitializer.configure(handler, null);
        }
        else
        {
            SystemLogger.LOGGER.warn("Failed to initialize jetty specific websocket support since the initializer class was not found. "
                    + "Check if the websocket-jetty-server bundle is deployed.");
        }
    }

    /**
     * Based on the configuration, store the WebSocket container attributes for the shared servlet context.
     *
     * @param context the context
     */
    private void maybeStoreWebSocketContainerAttributes(ServletContextHandler context)
    {
        // when the server is started, retrieve the container attribute and
        // set it on the shared servlet context once available
        if (this.config.isUseJettyWebsocket()
                && isClassNameVisible("org.eclipse.jetty.websocket.server.config.JettyWebSocketServletContainerInitializer"))
        {
            String attribute = org.eclipse.jetty.websocket.server.JettyWebSocketServerContainer.JETTY_WEBSOCKET_CONTAINER_ATTRIBUTE;
            this.controller.setAttributeSharedServletContext(attribute, context.getServletContext().getAttribute(attribute));
        }
        if (this.config.isUseJakartaWebsocket()
                && isClassNameVisible("org.eclipse.jetty.websocket.jakarta.server.config.JakartaWebSocketServletContainerInitializer"))
        {
            String attribute = org.eclipse.jetty.websocket.jakarta.server.config.JakartaWebSocketServletContainerInitializer.ATTR_JAKARTA_SERVER_CONTAINER;
            this.controller.setAttributeSharedServletContext(attribute, context.getServletContext().getAttribute(attribute));
        }
    }

    /**
     * Checks if an optional class name is visible to the bundle classloader
     *
     * @param className the class name to check
     * @return true if the class is visible, false otherwise
     */
    private boolean isClassNameVisible(String className)
    {
        boolean visible;
        try
        {
            // check if the class is visible to our classloader
            getClass().getClassLoader().loadClass(className);
            visible = true;
        }
        catch (ClassNotFoundException e)
        {
            visible = false;
        }
        return visible;
    }

    /**
     * Applies all TLS-related configuration (key/trust stores, client auth,
     * cipher suites, protocols, renegotiation) to the SSL context factory.
     * Only properties that are actually configured are applied.
     *
     * @param connector the SSL context factory to configure
     */
    private void configureSslContextFactory(final SslContextFactory.Server connector)
    {
        if (this.config.getKeystoreType() != null)
        {
            connector.setKeyStoreType(this.config.getKeystoreType());
        }

        if (this.config.getKeystore() != null)
        {
            connector.setKeyStorePath(this.config.getKeystore());
        }

        if (this.config.getPassword() != null)
        {
            connector.setKeyStorePassword(this.config.getPassword());
        }

        if (this.config.getKeyPassword() != null)
        {
            connector.setKeyManagerPassword(this.config.getKeyPassword());
        }

        if (this.config.getTruststoreType() != null)
        {
            connector.setTrustStoreType(this.config.getTruststoreType());
        }

        if (this.config.getTruststore() != null)
        {
            connector.setTrustStorePath(this.config.getTruststore());
        }

        if (this.config.getTrustPassword() != null)
        {
            connector.setTrustStorePassword(this.config.getTrustPassword());
        }

        // "wants" requests a client certificate without requiring it; "needs" requires one.
        if ("wants".equalsIgnoreCase(this.config.getClientcert()))
        {
            connector.setWantClientAuth(true);
        }
        else if ("needs".equalsIgnoreCase(this.config.getClientcert()))
        {
            connector.setNeedClientAuth(true);
        }

        if (this.config.getExcludedCipherSuites() != null)
        {
            connector.setExcludeCipherSuites(this.config.getExcludedCipherSuites());
        }

        if (this.config.getIncludedCipherSuites() != null)
        {
            connector.setIncludeCipherSuites(this.config.getIncludedCipherSuites());
        }

        if (this.config.getIncludedProtocols() != null)
        {
            connector.setIncludeProtocols(this.config.getIncludedProtocols());
        }

        if (this.config.getExcludedProtocols() != null)
        {
            connector.setExcludeProtocols(this.config.getExcludedProtocols());
        }

        connector.setRenegotiationAllowed(this.config.isRenegotiationAllowed());
    }

    /**
     * Applies host/port/idle-timeout settings to a connector and, when MBean
     * registration is enabled, attaches connection statistics.
     *
     * @param connector the connector to configure
     * @param port the port the connector should listen on
     */
    private void configureConnector(final ServerConnector connector, int port)
    {
        connector.setPort(port);
        connector.setHost(this.config.getHost());
        connector.setIdleTimeout(this.config.getHttpTimeout());

        if (this.config.isRegisterMBeans())
        {
            connector.addBean(new ConnectionStatistics());
        }
    }

    /**
     * Applies header sizes, buffer sizes, optional URI compliance mode and
     * server-identification settings to the HTTP connection factory.
     *
     * @param connFactory the connection factory to configure
     */
    private void configureHttpConnectionFactory(HttpConnectionFactory connFactory)
    {
        final HttpConfiguration config = connFactory.getHttpConfiguration();
        config.setRequestHeaderSize(this.config.getHeaderSize());
        config.setResponseHeaderSize(this.config.getHeaderSize());
        config.setOutputBufferSize(this.config.getResponseBufferSize());

        final String uriComplianceMode = this.config.getProperty(JettyConfig.FELIX_JETTY_URI_COMPLIANCE_MODE, null);
        if (uriComplianceMode != null)
        {
            try
            {
                // An unknown mode name is logged and ignored rather than failing startup.
                config.setUriCompliance(UriCompliance.valueOf(uriComplianceMode));
            }
            catch (IllegalArgumentException e)
            {
                SystemLogger.LOGGER.warn("Invalid URI compliance mode: {}", uriComplianceMode);
            }
        }

        // HTTP/1.1 requires Date header if possible (it is)
        config.setSendDateHeader(true);
        config.setSendServerVersion(this.config.isSendServerHeader());
        config.setSendXPoweredBy(this.config.isSendServerHeader());

        connFactory.setInputBufferSize(this.config.getRequestBufferSize());
    }

    /**
     * Configures session handling: timeout, id path parameter, tracking mode
     * (cookies only) and all session-cookie attributes from the configuration.
     *
     * @param context the servlet context handler owning the session handler
     * @throws Exception propagated from the session handler configuration
     */
    private void configureSessionManager(final ServletContextHandler context) throws Exception
    {
        final SessionHandler sessionHandler = context.getSessionHandler();
        // Config value is in minutes; Jetty expects seconds.
        sessionHandler.setMaxInactiveInterval(this.config.getSessionTimeout() * 60);
        sessionHandler.setSessionIdPathParameterName(this.config.getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_ID_PATH_PARAMETER_NAME, SessionHandler.__DefaultSessionIdPathParameterName));
        sessionHandler.setCheckingRemoteSessionIdEncoding(this.config.getBooleanProperty(JettyConfig.FELIX_JETTY_SERVLET_CHECK_REMOTE_SESSION_ENCODING, true));
        sessionHandler.setSessionTrackingModes(Collections.singleton(SessionTrackingMode.COOKIE));

        final SessionCookieConfig cookieConfig = sessionHandler.getSessionCookieConfig();
        cookieConfig.setName(this.config.getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_COOKIE_NAME, SessionHandler.__DefaultSessionCookie));
        cookieConfig.setDomain(this.config.getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_DOMAIN, SessionHandler.__DefaultSessionDomain));
        cookieConfig.setPath(this.config.getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_PATH, context.getContextPath()));
        cookieConfig.setMaxAge(this.config.getIntProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_MAX_AGE, -1));
        cookieConfig.setHttpOnly(this.config.getBooleanProperty(JettyConfig.FELIX_JETTY_SESSION_COOKIE_HTTP_ONLY, true));
        cookieConfig.setSecure(this.config.getBooleanProperty(JettyConfig.FELIX_JETTY_SESSION_COOKIE_SECURE, false));
    }

    /**
     * Adds the connector to the server and starts it; on failure the connector
     * is removed again so a broken connector never lingers on the server.
     *
     * @param connector the connector to add and start
     * @return {@code true} when the connector started, {@code false} otherwise
     */
    private boolean startConnector(Connector connector)
    {
        this.server.addConnector(connector);
        try
        {
            connector.start();
            return true;
        }
        catch (Exception e)
        {
            this.server.removeConnector(connector);
            SystemLogger.LOGGER.error("Failed to start Connector: {}", connector, e);
        }

        return false;
    }

    /**
     * Builds the endpoint URL for a connector and a concrete address, skipping
     * loopback, IPv6 link-local and non-IPv4/IPv6 addresses.
     *
     * @param listener the connector providing protocol and port
     * @param ia the resolved interface address
     * @return the endpoint URL, or {@code null} when the address is not usable
     */
    private String getEndpoint(final Connector listener, final InetAddress ia)
    {
        if (ia.isLoopbackAddress())
        {
            return null;
        }

        String address = ia.getHostAddress().trim().toLowerCase();
        if (ia instanceof Inet6Address)
        {
            // skip link-local
            if (address.startsWith("fe80:0:0:0:"))
            {
                return null;
            }
            // IPv6 literals must be bracketed in URLs.
            address = "[" + address + "]";
        }
        else if (!(ia instanceof Inet4Address))
        {
            return null;
        }

        return getEndpoint(listener, address);
    }

    /**
     * Narrows a connector to {@link ServerConnector}.
     *
     * @param connector the connector to cast
     * @return the connector as a ServerConnector
     * @throws IllegalArgumentException when the connector is of a different type
     */
    private ServerConnector getServerConnector(Connector connector)
    {
        if (connector instanceof ServerConnector)
        {
            return (ServerConnector) connector;
        }
        throw new IllegalArgumentException("Connection instance not of type ServerConnector " + connector);
    }

    /**
     * Builds the endpoint URL ({@code http[s]://host[:port]/contextPath}) for a
     * connector and hostname, omitting the port when it is the scheme default.
     *
     * @param listener the connector providing protocol and port
     * @param hostname the host part to embed in the URL
     * @return the endpoint URL
     */
    private String getEndpoint(final Connector listener, final String hostname)
    {
        final StringBuilder sb = new StringBuilder();
        sb.append("http");

        int defaultPort = 80;
        //SslConnectionFactory protocol is SSL-HTTP1.0
        if (getServerConnector(listener).getDefaultProtocol().startsWith("SSL"))
        {
            sb.append('s');
            defaultPort = 443;
        }

        sb.append("://");
        sb.append(hostname);

        if (getServerConnector(listener).getPort() != defaultPort)
        {
            sb.append(':');
            sb.append(String.valueOf(getServerConnector(listener).getPort()));
        }
        sb.append(config.getContextPath());

        return sb.toString();
    }

    /**
     * Collects the endpoint URLs for one connector across the addresses of the
     * given network interfaces.
     *
     * @param connector the connector to build endpoints for
     * @param interfaces the network interfaces to enumerate
     * @return the (possibly empty) list of endpoint URLs
     */
    private List<String> getEndpoints(final Connector connector, final List<NetworkInterface> interfaces)
    {
        final List<String> endpoints = new ArrayList<>();
        for (final NetworkInterface ni : interfaces)
        {
            final Enumeration<InetAddress> ias = ni.getInetAddresses();
            while (ias.hasMoreElements())
            {
                final InetAddress ia = ias.nextElement();
                final String endpoint = this.getEndpoint(connector, ia);
                if (endpoint != null)
                {
                    endpoints.add(endpoint);
                }
            }
        }
        return endpoints;
    }

    /**
     * Populates {@link HttpServiceRuntimeConstants#HTTP_SERVICE_ENDPOINT} with
     * the endpoint URLs of every server connector. For wildcard-bound connectors
     * all interface addresses are enumerated; loopback addresses are used only
     * when no other interface is available.
     *
     * @param props the properties table to populate
     * @param container unused here (kept for signature compatibility) — presumably a
     *        servlet container handle in other call sites; callers pass {@code null}
     */
    private void addEndpointProperties(final Hashtable<String, Object> props, Object container)
    {
        final List<String> endpoints = new ArrayList<>();

        final Connector[] connectors = this.server.getConnectors();
        if (connectors != null)
        {
            for (int i = 0; i < connectors.length; i++)
            {
                final Connector connector = connectors[i];

                if (getServerConnector(connector).getHost() == null
                        || "0.0.0.0".equals(getServerConnector(connector).getHost()))
                {
                    try
                    {
                        final List<NetworkInterface> interfaces = new ArrayList<>();
                        final List<NetworkInterface> loopBackInterfaces = new ArrayList<>();
                        final Enumeration<NetworkInterface> nis = NetworkInterface.getNetworkInterfaces();
                        while (nis.hasMoreElements())
                        {
                            final NetworkInterface ni = nis.nextElement();
                            if (ni.isLoopback())
                            {
                                loopBackInterfaces.add(ni);
                            }
                            else
                            {
                                interfaces.add(ni);
                            }
                        }

                        // only add loop back endpoints to the endpoint property if no other endpoint is available.
                        if (!interfaces.isEmpty())
                        {
                            endpoints.addAll(getEndpoints(connector, interfaces));
                        }
                        else
                        {
                            endpoints.addAll(getEndpoints(connector, loopBackInterfaces));
                        }
                    }
                    catch (final SocketException se)
                    {
                        // we ignore this
                    }
                }
                else
                {
                    final String endpoint = this.getEndpoint(connector, getServerConnector(connector).getHost());
                    if (endpoint != null)
                    {
                        endpoints.add(endpoint);
                    }
                }
            }
        }
        props.put(HttpServiceRuntimeConstants.HTTP_SERVICE_ENDPOINT,
                endpoints.toArray(new String[endpoints.size()]));
    }
}
apache/geode
35,188
geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import java.io.File; import java.io.FilenameFilter; import java.io.PrintStream; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import it.unimi.dsi.fastutil.longs.LongOpenHashSet; import org.apache.logging.log4j.Logger; import org.apache.geode.annotations.VisibleForTesting; import org.apache.geode.cache.DiskAccessException; import org.apache.geode.internal.cache.DiskStoreImpl.OplogEntryIdSet; import org.apache.geode.internal.cache.entries.DiskEntry; import org.apache.geode.internal.cache.entries.DiskEntry.Helper.ValueWrapper; import org.apache.geode.internal.cache.persistence.DiskRecoveryStore; import org.apache.geode.internal.cache.persistence.DiskRegionView; import org.apache.geode.internal.cache.persistence.DiskStoreFilter; import 
org.apache.geode.internal.cache.persistence.DiskStoreID; import org.apache.geode.internal.cache.persistence.OplogType; import org.apache.geode.internal.cache.versions.RegionVersionVector; import org.apache.geode.internal.sequencelog.EntryLogger; import org.apache.geode.logging.internal.log4j.api.LogService; public class PersistentOplogSet implements OplogSet { private static final Logger logger = LogService.getLogger(); /** variable to generate sequential unique oplogEntryId's* */ private final AtomicLong oplogEntryId = new AtomicLong(DiskStoreImpl.INVALID_ID); /** * Contains all the oplogs that only have a drf (i.e. the crf has been deleted). */ private final Map<Long, Oplog> drfOnlyOplogs = new LinkedHashMap<>(); private final Map<Long, Oplog> oplogIdToOplog = new LinkedHashMap<>(); /** oplogs that are done being written to but not yet ready to compact */ private final Map<Long, Oplog> inactiveOplogs = new LinkedHashMap<>(16, 0.75f, true); private final DiskStoreImpl parent; private final AtomicInteger inactiveOpenCount = new AtomicInteger(); private final Map<Long, DiskRecoveryStore> pendingRecoveryMap = new HashMap<>(); private final Map<Long, DiskRecoveryStore> currentRecoveryMap = new HashMap<>(); private final AtomicBoolean alreadyRecoveredOnce = new AtomicBoolean(false); private final PrintStream out; /** The active oplog * */ private volatile Oplog child; /** * The maximum oplog id we saw while recovering */ private volatile long maxRecoveredOplogId; /** counter used for round-robin logic * */ private int dirCounter = -1; public PersistentOplogSet(DiskStoreImpl parent, PrintStream out) { this.parent = parent; this.out = out; } /** * returns the active child */ public Oplog getChild() { return child; } /** * set the child to a new oplog */ void setChild(Oplog oplog) { child = oplog; } Oplog[] getAllOplogs() { synchronized (getOplogIdToOplog()) { int rollNum = getOplogIdToOplog().size(); int inactiveNum = inactiveOplogs.size(); int drfOnlyNum = 
drfOnlyOplogs.size(); int num = rollNum + inactiveNum + drfOnlyNum + 1; Oplog[] oplogs = new Oplog[num]; oplogs[0] = getChild(); Iterator<Oplog> oplogIterator = getOplogIdToOplog().values().iterator(); for (int i = 1; i <= rollNum; i++) { oplogs[i] = oplogIterator.next(); } oplogIterator = inactiveOplogs.values().iterator(); for (int i = 1; i <= inactiveNum; i++) { oplogs[i + rollNum] = oplogIterator.next(); } oplogIterator = drfOnlyOplogs.values().iterator(); for (int i = 1; i <= drfOnlyNum; i++) { oplogs[i + rollNum + inactiveNum] = oplogIterator.next(); } // Special case - no oplogs found if (oplogs.length == 1 && oplogs[0] == null) { return new Oplog[0]; } return oplogs; } } private TreeSet<Oplog> getSortedOplogs() { TreeSet<Oplog> result = new TreeSet<>( (Comparator) (arg0, arg1) -> Long .signum(((Oplog) arg1).getOplogId() - ((Oplog) arg0).getOplogId())); for (Oplog oplog : getAllOplogs()) { if (oplog != null) { result.add(oplog); } } return result; } /** * Get the oplog specified * * @param id int oplogId to be got * @return Oplogs the oplog corresponding to the oplodId, id */ @Override public Oplog getChild(long id) { Oplog localOplog = child; if (localOplog != null && id == localOplog.getOplogId()) { return localOplog; } synchronized (getOplogIdToOplog()) { Oplog result = getOplogIdToOplog().get(id); if (result == null) { result = inactiveOplogs.get(id); } return result; } } @Override public void create(InternalRegion region, DiskEntry entry, ValueWrapper value, boolean async) { getChild().create(region, entry, value, async); } @Override public void modify(InternalRegion region, DiskEntry entry, ValueWrapper value, boolean async) { getChild().modify(region, entry, value, async); } void offlineModify(DiskRegionView drv, DiskEntry entry, byte[] value, boolean isSerializedObject) { getChild().offlineModify(drv, entry, value, isSerializedObject); } @Override public void remove(InternalRegion region, DiskEntry entry, boolean async, boolean isClear) { 
getChild().remove(region, entry, async, isClear); } public void forceRoll(DiskRegion dr) { Oplog child = getChild(); if (child != null) { child.forceRolling(dr); } } Map<File, DirectoryHolder> findFiles(String partialFileName) { dirCounter = 0; Map<File, DirectoryHolder> backupFiles = new HashMap<>(); FilenameFilter backupFileFilter = getFileNameFilter(partialFileName); for (DirectoryHolder dh : parent.directories) { File[] backupList = dh.getDir().listFiles(backupFileFilter); if (backupList != null) { for (File f : backupList) { backupFiles.put(f, dh); } } } return backupFiles; } private FilenameFilter getFileNameFilter(String partialFileName) { return new DiskStoreFilter(OplogType.BACKUP, false, partialFileName); } void createOplogs(boolean needsOplogs, Map<File, DirectoryHolder> backupFiles) { LongOpenHashSet foundCrfs = new LongOpenHashSet(); LongOpenHashSet foundDrfs = new LongOpenHashSet(); for (Map.Entry<File, DirectoryHolder> entry : backupFiles.entrySet()) { File file = entry.getKey(); String absolutePath = file.getAbsolutePath(); int underscorePosition = absolutePath.lastIndexOf('_'); int pointPosition = absolutePath.lastIndexOf('.'); String oplogIdString = absolutePath.substring(underscorePosition + 1, pointPosition); long oplogId = Long.parseLong(oplogIdString); maxRecoveredOplogId = Math.max(maxRecoveredOplogId, oplogId); // here look diskinit file and check if this opid already deleted or not // if deleted then don't process it. 
if (Oplog.isCRFFile(file.getName())) { if (!isCrfOplogIdPresent(oplogId)) { deleteFileOnRecovery(file); try { String krfFileName = Oplog.getKRFFilenameFromCRFFilename(file.getAbsolutePath()); File krfFile = new File(krfFileName); deleteFileOnRecovery(krfFile); } catch (Exception ignore) { // ignore } // this file we unable to delete earlier continue; } } else if (Oplog.isDRFFile(file.getName())) { if (!isDrfOplogIdPresent(oplogId)) { deleteFileOnRecovery(file); // this file we unable to delete earlier continue; } } Oplog oplog = getChild(oplogId); if (oplog == null) { oplog = new Oplog(oplogId, this); addRecoveredOplog(oplog); } if (oplog.addRecoveredFile(file, entry.getValue())) { foundCrfs.add(oplogId); } else { foundDrfs.add(oplogId); } } if (needsOplogs) { verifyOplogs(foundCrfs, foundDrfs); } } private boolean isDrfOplogIdPresent(long oplogId) { return parent.getDiskInitFile().isDRFOplogIdPresent(oplogId); } private boolean isCrfOplogIdPresent(long oplogId) { return parent.getDiskInitFile().isCRFOplogIdPresent(oplogId); } private void verifyOplogs(LongOpenHashSet foundCrfs, LongOpenHashSet foundDrfs) { parent.getDiskInitFile().verifyOplogs(foundCrfs, foundDrfs); } private void deleteFileOnRecovery(File f) { try { f.delete(); } catch (Exception ignore) { // ignore, one more attempt to delete the file failed } } private void addRecoveredOplog(Oplog oplog) { basicAddToBeCompacted(oplog); // don't schedule a compaction here. Wait for recovery to complete } /** * Taking a lock on the LinkedHashMap oplogIdToOplog as it the operation of adding an Oplog to the * Map & notifying the Compactor thread , if not already compaction has to be an atomic operation. * add the oplog to the to be compacted set. 
if compactor thread is active and recovery is not * going on then the compactor thread is notified of the addition */ void addToBeCompacted(Oplog oplog) { basicAddToBeCompacted(oplog); parent.scheduleCompaction(); } private void basicAddToBeCompacted(Oplog oplog) { if (!oplog.isRecovering() && oplog.hasNoLiveValues()) { oplog.cancelKrf(); oplog.close(); oplog.deleteFiles(oplog.getHasDeletes()); } else { parent.getStats().incCompactableOplogs(1); Long key = oplog.getOplogId(); int inactivePromotedCount = 0; synchronized (getOplogIdToOplog()) { if (inactiveOplogs.remove(key) != null) { if (oplog.isRAFOpen()) { inactiveOpenCount.decrementAndGet(); } inactivePromotedCount++; } getOplogIdToOplog().put(key, oplog); } if (inactivePromotedCount > 0) { parent.getStats().incInactiveOplogs(-inactivePromotedCount); } } } void recoverRegionsThatAreReady() { // The following sync also prevents concurrent recoveries by multiple regions // which is needed currently. synchronized (getAlreadyRecoveredOnce()) { // need to take a snapshot of DiskRecoveryStores we will recover synchronized (pendingRecoveryMap) { currentRecoveryMap.clear(); currentRecoveryMap.putAll(pendingRecoveryMap); pendingRecoveryMap.clear(); } if (currentRecoveryMap.isEmpty() && getAlreadyRecoveredOnce().get()) { // no recovery needed return; } for (DiskRecoveryStore drs : currentRecoveryMap.values()) { // Call prepare early to fix bug 41119. drs.getDiskRegionView().prepareForRecovery(); } if (!getAlreadyRecoveredOnce().get()) { initOplogEntryId(); // Fix for #43026 - make sure we don't reuse an entry // id that has been marked as cleared. 
updateOplogEntryId(parent.getDiskInitFile().getMaxRecoveredClearEntryId()); } long start = parent.getStats().startRecovery(); EntryLogger.setSource(parent.getDiskStoreID(), "recovery"); long byteCount = 0; try { byteCount = recoverOplogs(byteCount); } finally { Map<String, Integer> prSizes = null; Map<String, Integer> prBuckets = null; if (parent.isValidating()) { prSizes = new HashMap<>(); prBuckets = new HashMap<>(); } for (DiskRecoveryStore drs : currentRecoveryMap.values()) { for (Oplog oplog : getAllOplogs()) { if (oplog != null) { // Need to do this AFTER recovery to protect from concurrent compactions // trying to remove the oplogs. // We can't remove a dr from the oplog's unrecoveredRegionCount // until it is fully recovered. // This fixes bug 41119. oplog.checkForRecoverableRegion(drs.getDiskRegionView()); } } if (parent.isValidating()) { if (drs instanceof ValidatingDiskRegion) { ValidatingDiskRegion vdr = (ValidatingDiskRegion) drs; if (vdr.isBucket()) { String prName = vdr.getPrName(); if (prSizes.containsKey(prName)) { int oldSize = prSizes.get(prName); oldSize += vdr.size(); prSizes.put(prName, oldSize); int oldBuckets = prBuckets.get(prName); oldBuckets++; prBuckets.put(prName, oldBuckets); } else { prSizes.put(prName, vdr.size()); prBuckets.put(prName, 1); } } else { parent.incLiveEntryCount(vdr.size()); out.println(vdr.getName() + ": entryCount=" + vdr.size()); } } } } if (parent.isValidating()) { for (Map.Entry<String, Integer> me : prSizes.entrySet()) { parent.incLiveEntryCount(me.getValue()); out.println(me.getKey() + " entryCount=" + me.getValue() + " bucketCount=" + prBuckets.get(me.getKey())); } } parent.getStats().endRecovery(start, byteCount); getAlreadyRecoveredOnce().set(true); currentRecoveryMap.clear(); EntryLogger.clearSource(); } } } private long recoverOplogs(long byteCount) { OplogEntryIdSet deletedIds = new OplogEntryIdSet(parent.DRF_HASHMAP_OVERFLOW_THRESHOLD); TreeSet<Oplog> oplogSet = getSortedOplogs(); if 
(!getAlreadyRecoveredOnce().get()) { if (getChild() != null && !getChild().hasBeenUsed()) { // Then remove the current child since it is empty // and does not need to be recovered from // and it is important to not call initAfterRecovery on it. oplogSet.remove(getChild()); } } Set<Oplog> oplogsNeedingValueRecovery = new HashSet<>(); if (!oplogSet.isEmpty()) { long startOpLogRecovery = System.currentTimeMillis(); // first figure out all entries that have been destroyed boolean latestOplog = true; for (Oplog oplog : oplogSet) { byteCount += oplog.recoverDrf(deletedIds, getAlreadyRecoveredOnce().get(), latestOplog); latestOplog = false; if (!getAlreadyRecoveredOnce().get()) { updateOplogEntryId(oplog.getMaxRecoveredOplogEntryId()); } } parent.incDeadRecordCount(deletedIds.size()); // now figure out live entries latestOplog = true; for (Oplog oplog : oplogSet) { long startOpLogRead = parent.getStats().startOplogRead(); long bytesRead = oplog.recoverCrf(deletedIds, recoverValues(), recoverValuesSync(), getAlreadyRecoveredOnce().get(), oplogsNeedingValueRecovery, latestOplog); latestOplog = false; if (!getAlreadyRecoveredOnce().get()) { updateOplogEntryId(oplog.getMaxRecoveredOplogEntryId()); } byteCount += bytesRead; parent.getStats().endOplogRead(startOpLogRead, bytesRead); // Callback to the disk regions to indicate the oplog is recovered // Used for offline export for (DiskRecoveryStore drs : currentRecoveryMap.values()) { drs.getDiskRegionView().oplogRecovered(oplog.oplogId); } } long endOpLogRecovery = System.currentTimeMillis(); long elapsed = endOpLogRecovery - startOpLogRecovery; logger.info("recovery oplog load took {} ms", elapsed); } if (!parent.isOfflineCompacting()) { long startRegionInit = System.currentTimeMillis(); // create the oplogs now so that loadRegionData can have them available // Create an array of Oplogs so that we are able to add it in a single shot // to the map for (DiskRecoveryStore drs : currentRecoveryMap.values()) { 
drs.getDiskRegionView().initRecoveredEntryCount(); } if (!getAlreadyRecoveredOnce().get()) { for (Oplog oplog : oplogSet) { if (oplog != getChild()) { oplog.initAfterRecovery(parent.isOffline()); } } if (getChild() == null) { setFirstChild(getSortedOplogs(), false); } } if (!parent.isOffline()) { if (recoverValues() && !recoverValuesSync()) { // value recovery defers compaction because it is using the compactor thread parent.scheduleValueRecovery(oplogsNeedingValueRecovery, currentRecoveryMap); } if (!getAlreadyRecoveredOnce().get()) { // Create krfs for oplogs that are missing them for (Oplog oplog : oplogSet) { if (oplog.needsKrf()) { oplog.createKrfAsync(); } } parent.scheduleCompaction(); } long endRegionInit = System.currentTimeMillis(); logger.info("recovery region initialization took {} ms", endRegionInit - startRegionInit); } } return byteCount; } private boolean recoverValuesSync() { return parent.RECOVER_VALUES_SYNC; } private boolean recoverValues() { return parent.RECOVER_VALUES; } private void setFirstChild(TreeSet<Oplog> oplogSet, boolean force) { if (parent.isOffline() && !parent.isOfflineCompacting() && !parent.isOfflineModify()) { return; } if (!oplogSet.isEmpty()) { Oplog first = oplogSet.first(); DirectoryHolder dh = first.getDirectoryHolder(); dirCounter = dh.getArrayIndex(); dirCounter = ++dirCounter % parent.dirLength; // we want the first child to go in the directory after the directory // used by the existing oplog with the max id. // This fixes bug 41822. 
} if (force || maxRecoveredOplogId > 0) { setChild(new Oplog(maxRecoveredOplogId + 1, this, getNextDir())); } } private void initOplogEntryId() { oplogEntryId.set(DiskStoreImpl.INVALID_ID); } /** * Sets the last created oplogEntryId to the given value if and only if the given value is greater * than the current last created oplogEntryId */ private void updateOplogEntryId(long v) { long curVal; do { curVal = oplogEntryId.get(); if (curVal >= v) { // no need to set return; } } while (!oplogEntryId.compareAndSet(curVal, v)); } /** * Returns the last created oplogEntryId. Returns INVALID_ID if no oplogEntryId has been created. */ long getOplogEntryId() { parent.initializeIfNeeded(); return oplogEntryId.get(); } /** * Creates and returns a new oplogEntryId for the given key. An oplogEntryId is needed when * storing a key/value pair on disk. A new one is only needed if the key is new. Otherwise the * oplogEntryId already allocated for a key can be reused for the same key. * * @return A disk id that can be used to access this key/value pair on disk */ long newOplogEntryId() { return oplogEntryId.incrementAndGet(); } /** * Returns the next available DirectoryHolder which has space. If no dir has space then it will * return one anyway if compaction is enabled. * * @param minAvailableSpace the minimum amount of space we need in this directory. */ DirectoryHolder getNextDir(long minAvailableSpace, boolean checkForWarning) { if (minAvailableSpace < parent.getMaxOplogSizeInBytes() && !DiskStoreImpl.SET_IGNORE_PREALLOCATE) { minAvailableSpace = parent.getMaxOplogSizeInBytes(); } DirectoryHolder selectedHolder = null; synchronized (parent.getDirectoryHolders()) { for (int i = 0; i < parent.dirLength; ++i) { DirectoryHolder dirHolder = parent.directories[dirCounter]; // Increment the directory counter to next position so that next time when this operation // is invoked, it checks for the Directory Space in a cyclical fashion. 
dirCounter = ++dirCounter % parent.dirLength; // if the current directory has some space, then quit and return the dir if (dirHolder.getAvailableSpace() >= minAvailableSpace) { if (checkForWarning && !parent.isDirectoryUsageNormal(dirHolder)) { if (logger.isDebugEnabled()) { logger.debug("Ignoring directory {} due to insufficient disk space", dirHolder); } } else { selectedHolder = dirHolder; break; } } } if (selectedHolder == null) { // we didn't find a warning-free directory, try again ignoring the check if (checkForWarning) { return getNextDir(minAvailableSpace, false); } if (parent.isCompactionEnabled()) { selectedHolder = parent.directories[dirCounter]; // Increment the directory counter to next position dirCounter = ++dirCounter % parent.dirLength; if (selectedHolder.getAvailableSpace() < minAvailableSpace) { logger.warn( "Even though the configured directory size limit has been exceeded a new oplog will be created because compaction is enabled. The configured limit is {}. The current space used in the directory by this disk store is {}.", selectedHolder.getUsedSpace(), selectedHolder.getCapacity()); } } else { throw new DiskAccessException( "Disk is full and compaction is disabled. No space can be created", parent); } } } return selectedHolder; } DirectoryHolder getNextDir() { return getNextDir(DiskStoreImpl.MINIMUM_DIR_SIZE, true); } private void addDrf(Oplog oplog) { synchronized (getOplogIdToOplog()) { drfOnlyOplogs.put(oplog.getOplogId(), oplog); } } void removeDrf(Oplog oplog) { synchronized (getOplogIdToOplog()) { drfOnlyOplogs.remove(oplog.getOplogId()); } } /** * Return true if id is less than all the ids in the oplogIdToOplog map. Since the oldest one is * in the LINKED hash map is first we only need to compare ourselves to it. 
*/ boolean isOldestExistingOplog(long id) { synchronized (getOplogIdToOplog()) { for (long otherId : getOplogIdToOplog().keySet()) { if (id > otherId) { return false; } } // since the inactiveOplogs map is an LRU we need to check each one for (long otherId : inactiveOplogs.keySet()) { if (id > otherId) { return false; } } } return true; } /** * Destroy all the oplogs that are: * * <pre> * 1. the oldest (based on smallest oplog id) * 2. empty (have no live values) * </pre> */ void destroyOldestReadyToCompact() { synchronized (getOplogIdToOplog()) { if (drfOnlyOplogs.isEmpty()) { return; } } Oplog oldestLiveOplog = getOldestLiveOplog(); List<Oplog> toDestroy = new ArrayList<>(); if (oldestLiveOplog == null) { // remove all oplogs that are empty synchronized (getOplogIdToOplog()) { toDestroy.addAll(drfOnlyOplogs.values()); } } else { // remove all empty oplogs that are older than the oldest live one synchronized (getOplogIdToOplog()) { for (Oplog oplog : drfOnlyOplogs.values()) { if (oplog.getOplogId() < oldestLiveOplog.getOplogId()) { toDestroy.add(oplog); } } } } for (Oplog oplog : toDestroy) { oplog.destroy(); } } private Oplog getOldestLiveOplog() { Oplog result = null; synchronized (getOplogIdToOplog()) { for (Oplog oplog : getOplogIdToOplog().values()) { if (result == null || oplog.getOplogId() < result.getOplogId()) { result = oplog; } } // since the inactiveOplogs map is an LRU we need to check each one for (Oplog oplog : inactiveOplogs.values()) { if (result == null || oplog.getOplogId() < result.getOplogId()) { result = oplog; } } } return result; } void inactiveAccessed(Oplog oplog) { Long key = oplog.getOplogId(); synchronized (getOplogIdToOplog()) { // update last access time inactiveOplogs.get(key); } } void inactiveReopened(Oplog oplog) { addInactive(oplog, true); } void addInactive(Oplog oplog) { addInactive(oplog, false); } private void addInactive(Oplog oplog, boolean reopen) { Long key = oplog.getOplogId(); List<Oplog> openlist = null; synchronized 
(getOplogIdToOplog()) { boolean isInactive = true; if (reopen) { // It is possible that 'oplog' is compactible instead of inactive. So set isInactive. isInactive = inactiveOplogs.get(key) != null; } else { inactiveOplogs.put(key, oplog); } if (reopen && isInactive || oplog.isRAFOpen()) { if (inactiveOpenCount.incrementAndGet() > DiskStoreImpl.MAX_OPEN_INACTIVE_OPLOGS) { openlist = new ArrayList<>(); for (Oplog inactiveOplog : inactiveOplogs.values()) { if (inactiveOplog.isRAFOpen()) { // add to my list openlist.add(inactiveOplog); } } } } } if (openlist != null) { for (Oplog openOplog : openlist) { if (openOplog.closeRAF()) { if (inactiveOpenCount.decrementAndGet() <= DiskStoreImpl.MAX_OPEN_INACTIVE_OPLOGS) { break; } } } } if (!reopen) { parent.getStats().incInactiveOplogs(1); } } public void clear(DiskRegion diskRegion, RegionVersionVector<DiskStoreID> regionVersionVector) { // call clear on each oplog Collection<Oplog> oplogsToClear = new ArrayList<>(); synchronized (getOplogIdToOplog()) { oplogsToClear.addAll(getOplogIdToOplog().values()); oplogsToClear.addAll(inactiveOplogs.values()); Oplog child = getChild(); if (child != null) { oplogsToClear.add(child); } } for (Oplog oplog : oplogsToClear) { oplog.clear(diskRegion, regionVersionVector); } if (regionVersionVector != null) { parent.getDiskInitFile().clearRegion(diskRegion, regionVersionVector); } else { long clearedOplogEntryId = getOplogEntryId(); parent.getDiskInitFile().clearRegion(diskRegion, clearedOplogEntryId); } } public RuntimeException close() { RuntimeException firstRuntimeException = null; try { closeOtherOplogs(); } catch (RuntimeException e) { firstRuntimeException = e; } if (child != null) { try { child.finishKrf(); } catch (RuntimeException e) { if (firstRuntimeException != null) { firstRuntimeException = e; } } try { child.close(); } catch (RuntimeException e) { if (firstRuntimeException != null) { firstRuntimeException = e; } } } return firstRuntimeException; } /** closes all the oplogs 
except the current one * */ private void closeOtherOplogs() { // get a snapshot to prevent CME Oplog[] oplogs = getAllOplogs(); // if there are oplogs which are to be compacted, destroy them do not do oplogs[0] for (int i = 1; i < oplogs.length; i++) { oplogs[i].finishKrf(); oplogs[i].close(); removeOplog(oplogs[i].getOplogId()); } } /** * Removes the oplog from the map given the oplogId * * @param id id of the oplog to be removed from the list * @return oplog Oplog which has been removed */ Oplog removeOplog(long id) { return removeOplog(id, false, null); } Oplog removeOplog(long id, boolean deleting, Oplog olgToAddToDrfOnly) { Oplog oplog; boolean drfOnly = false; boolean inactive = false; synchronized (getOplogIdToOplog()) { Long key = id; oplog = getOplogIdToOplog().remove(key); if (oplog == null) { oplog = inactiveOplogs.remove(key); if (oplog != null) { if (oplog.isRAFOpen()) { inactiveOpenCount.decrementAndGet(); } inactive = true; } else { oplog = drfOnlyOplogs.remove(key); if (oplog != null) { drfOnly = true; } } } if (olgToAddToDrfOnly != null) { addDrf(olgToAddToDrfOnly); } } if (oplog != null) { if (!drfOnly) { if (inactive) { parent.getStats().incInactiveOplogs(-1); } else { parent.getStats().incCompactableOplogs(-1); } } if (!deleting && !oplog.isOplogEmpty()) { // we are removing an oplog whose files are not deleted parent.undeletedOplogSize.addAndGet(oplog.getOplogSize()); } } return oplog; } public void basicClose(DiskRegion dr) { List<Oplog> oplogsToClose = new ArrayList<>(); synchronized (getOplogIdToOplog()) { oplogsToClose.addAll(getOplogIdToOplog().values()); oplogsToClose.addAll(inactiveOplogs.values()); oplogsToClose.addAll(drfOnlyOplogs.values()); Oplog child = getChild(); if (child != null) { oplogsToClose.add(child); } } for (Oplog oplog : oplogsToClose) { oplog.close(dr); } } public void prepareForClose() { Collection<Oplog> oplogsToPrepare = new ArrayList<>(); synchronized (getOplogIdToOplog()) { 
oplogsToPrepare.addAll(getOplogIdToOplog().values()); oplogsToPrepare.addAll(inactiveOplogs.values()); } boolean childPreparedForClose = false; long childOplogId = getChild() == null ? -1 : getChild().oplogId; for (Oplog oplog : oplogsToPrepare) { oplog.prepareForClose(); if (childOplogId != -1 && oplog.oplogId == childOplogId) { childPreparedForClose = true; } } if (!childPreparedForClose && getChild() != null) { getChild().prepareForClose(); } } public void basicDestroy(DiskRegion diskRegion) { Collection<Oplog> oplogsToDestroy = new ArrayList<>(); synchronized (getOplogIdToOplog()) { oplogsToDestroy.addAll(getOplogIdToOplog().values()); oplogsToDestroy.addAll(inactiveOplogs.values()); oplogsToDestroy.addAll(drfOnlyOplogs.values()); Oplog child = getChild(); if (child != null) { oplogsToDestroy.add(child); } } for (Oplog oplog : oplogsToDestroy) { oplog.destroy(diskRegion); } } void destroyAllOplogs() { // get a snapshot to prevent ConcurrentModificationException for (Oplog oplog : getAllOplogs()) { if (oplog != null) { oplog.destroy(); removeOplog(oplog.getOplogId()); } } } /** * Add compactable oplogs to the list, up to the maximum size. */ void getCompactableOplogs(List<CompactableOplog> compactableOplogs, int max) { synchronized (getOplogIdToOplog()) { for (Oplog oplog : getOplogIdToOplog().values()) { if (compactableOplogs.size() >= max) { return; } if (oplog.needsCompaction()) { compactableOplogs.add(oplog); } } } } void scheduleForRecovery(DiskRecoveryStore diskRecoveryStore) { DiskRegionView diskRegionView = diskRecoveryStore.getDiskRegionView(); if (diskRegionView.isRecreated() && (diskRegionView.getMyPersistentID() != null || diskRegionView.getMyInitializingID() != null)) { // If a region does not have either id then don't pay the cost // of scanning the oplogs for recovered data. 
synchronized (pendingRecoveryMap) { pendingRecoveryMap.put(diskRegionView.getId(), diskRecoveryStore); } } } /** * Returns null if we are not currently recovering the DiskRegion with the given drId. */ DiskRecoveryStore getCurrentlyRecovering(long drId) { return currentRecoveryMap.get(drId); } void initChild() { if (getChild() == null) { setFirstChild(getSortedOplogs(), true); } } public void offlineCompact() { if (getChild() != null) { // check active oplog and if it is empty delete it getChild().krfClose(); if (getChild().isOplogEmpty()) { getChild().destroy(); } } // remove any oplogs that only have a drf Collection<Oplog> oplogsToDestroy = new ArrayList<>(); synchronized (getOplogIdToOplog()) { for (Oplog oplog : getOplogIdToOplog().values()) { if (oplog.isDrfOnly()) { oplogsToDestroy.add(oplog); } } } for (Oplog oplog : oplogsToDestroy) { oplog.destroy(); } destroyOldestReadyToCompact(); } public DiskStoreImpl getParent() { return parent; } void updateDiskRegion(AbstractDiskRegion diskRegion) { for (Oplog oplog : getAllOplogs()) { if (oplog != null) { oplog.updateDiskRegion(diskRegion); } } } void flushChild() { Oplog oplog = getChild(); if (oplog != null) { oplog.flushAll(); } } public String getPrefix() { return OplogType.BACKUP.getPrefix(); } void crfCreate(long oplogId) { getParent().getDiskInitFile().crfCreate(oplogId); } void drfCreate(long oplogId) { getParent().getDiskInitFile().drfCreate(oplogId); } void crfDelete(long oplogId) { getParent().getDiskInitFile().crfDelete(oplogId); } void drfDelete(long oplogId) { getParent().getDiskInitFile().drfDelete(oplogId); } boolean couldHaveKrf() { return getParent().couldHaveKrf(); } public boolean isCompactionPossible() { return getParent().isCompactionPossible(); } /** oplogs that are ready to compact */ Map<Long, Oplog> getOplogIdToOplog() { return oplogIdToOplog; } AtomicBoolean getAlreadyRecoveredOnce() { return alreadyRecoveredOnce; } @VisibleForTesting Map<Long, DiskRecoveryStore> getPendingRecoveryMap() 
{ return pendingRecoveryMap; } }
googleapis/google-cloud-java
35,346
java-talent/grpc-google-cloud-talent-v4/src/main/java/com/google/cloud/talent/v4/CompanyServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.talent.v4; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * A service that handles company management, including CRUD and enumeration. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/talent/v4/company_service.proto") @io.grpc.stub.annotations.GrpcGenerated public final class CompanyServiceGrpc { private CompanyServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.cloud.talent.v4.CompanyService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.talent.v4.CreateCompanyRequest, com.google.cloud.talent.v4.Company> getCreateCompanyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateCompany", requestType = com.google.cloud.talent.v4.CreateCompanyRequest.class, responseType = com.google.cloud.talent.v4.Company.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.talent.v4.CreateCompanyRequest, com.google.cloud.talent.v4.Company> getCreateCompanyMethod() { io.grpc.MethodDescriptor< com.google.cloud.talent.v4.CreateCompanyRequest, com.google.cloud.talent.v4.Company> getCreateCompanyMethod; if ((getCreateCompanyMethod = CompanyServiceGrpc.getCreateCompanyMethod) == null) { synchronized (CompanyServiceGrpc.class) { if ((getCreateCompanyMethod = CompanyServiceGrpc.getCreateCompanyMethod) == null) { CompanyServiceGrpc.getCreateCompanyMethod = getCreateCompanyMethod = io.grpc.MethodDescriptor .<com.google.cloud.talent.v4.CreateCompanyRequest, com.google.cloud.talent.v4.Company> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateCompany")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.CreateCompanyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.Company.getDefaultInstance())) .setSchemaDescriptor( new CompanyServiceMethodDescriptorSupplier("CreateCompany")) .build(); } } } return getCreateCompanyMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.talent.v4.GetCompanyRequest, com.google.cloud.talent.v4.Company> getGetCompanyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetCompany", requestType = com.google.cloud.talent.v4.GetCompanyRequest.class, responseType = 
com.google.cloud.talent.v4.Company.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.talent.v4.GetCompanyRequest, com.google.cloud.talent.v4.Company> getGetCompanyMethod() { io.grpc.MethodDescriptor< com.google.cloud.talent.v4.GetCompanyRequest, com.google.cloud.talent.v4.Company> getGetCompanyMethod; if ((getGetCompanyMethod = CompanyServiceGrpc.getGetCompanyMethod) == null) { synchronized (CompanyServiceGrpc.class) { if ((getGetCompanyMethod = CompanyServiceGrpc.getGetCompanyMethod) == null) { CompanyServiceGrpc.getGetCompanyMethod = getGetCompanyMethod = io.grpc.MethodDescriptor .<com.google.cloud.talent.v4.GetCompanyRequest, com.google.cloud.talent.v4.Company> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetCompany")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.GetCompanyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.Company.getDefaultInstance())) .setSchemaDescriptor(new CompanyServiceMethodDescriptorSupplier("GetCompany")) .build(); } } } return getGetCompanyMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.talent.v4.UpdateCompanyRequest, com.google.cloud.talent.v4.Company> getUpdateCompanyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateCompany", requestType = com.google.cloud.talent.v4.UpdateCompanyRequest.class, responseType = com.google.cloud.talent.v4.Company.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.talent.v4.UpdateCompanyRequest, com.google.cloud.talent.v4.Company> getUpdateCompanyMethod() { io.grpc.MethodDescriptor< com.google.cloud.talent.v4.UpdateCompanyRequest, com.google.cloud.talent.v4.Company> 
getUpdateCompanyMethod; if ((getUpdateCompanyMethod = CompanyServiceGrpc.getUpdateCompanyMethod) == null) { synchronized (CompanyServiceGrpc.class) { if ((getUpdateCompanyMethod = CompanyServiceGrpc.getUpdateCompanyMethod) == null) { CompanyServiceGrpc.getUpdateCompanyMethod = getUpdateCompanyMethod = io.grpc.MethodDescriptor .<com.google.cloud.talent.v4.UpdateCompanyRequest, com.google.cloud.talent.v4.Company> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateCompany")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.UpdateCompanyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.Company.getDefaultInstance())) .setSchemaDescriptor( new CompanyServiceMethodDescriptorSupplier("UpdateCompany")) .build(); } } } return getUpdateCompanyMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.talent.v4.DeleteCompanyRequest, com.google.protobuf.Empty> getDeleteCompanyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteCompany", requestType = com.google.cloud.talent.v4.DeleteCompanyRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.talent.v4.DeleteCompanyRequest, com.google.protobuf.Empty> getDeleteCompanyMethod() { io.grpc.MethodDescriptor< com.google.cloud.talent.v4.DeleteCompanyRequest, com.google.protobuf.Empty> getDeleteCompanyMethod; if ((getDeleteCompanyMethod = CompanyServiceGrpc.getDeleteCompanyMethod) == null) { synchronized (CompanyServiceGrpc.class) { if ((getDeleteCompanyMethod = CompanyServiceGrpc.getDeleteCompanyMethod) == null) { CompanyServiceGrpc.getDeleteCompanyMethod = getDeleteCompanyMethod = io.grpc.MethodDescriptor 
// ---------------------------------------------------------------------------
// NOTE(review): generated gRPC bindings (grpc-java codegen) for the
// google.cloud.talent.v4 CompanyService — generated code, do not hand-edit.
// This section contains: the tail of the lazily-initialized DeleteCompany
// method descriptor, the ListCompanies method descriptor, the stub factory
// methods (async / blocking-v2 / blocking / future), the AsyncService
// interface with default "unimplemented" handlers, the four client stub
// classes, the MethodHandlers unary dispatch switch, bindService(), the
// proto descriptor suppliers, and the lazily-built ServiceDescriptor.
// All five RPCs (CreateCompany, GetCompany, UpdateCompany, DeleteCompany,
// ListCompanies) are unary, per the .setType(...UNARY) calls below.
// ---------------------------------------------------------------------------
.<com.google.cloud.talent.v4.DeleteCompanyRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteCompany")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.DeleteCompanyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.protobuf.Empty.getDefaultInstance())) .setSchemaDescriptor( new CompanyServiceMethodDescriptorSupplier("DeleteCompany")) .build(); } } } return getDeleteCompanyMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.talent.v4.ListCompaniesRequest, com.google.cloud.talent.v4.ListCompaniesResponse> getListCompaniesMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListCompanies", requestType = com.google.cloud.talent.v4.ListCompaniesRequest.class, responseType = com.google.cloud.talent.v4.ListCompaniesResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.talent.v4.ListCompaniesRequest, com.google.cloud.talent.v4.ListCompaniesResponse> getListCompaniesMethod() { io.grpc.MethodDescriptor< com.google.cloud.talent.v4.ListCompaniesRequest, com.google.cloud.talent.v4.ListCompaniesResponse> getListCompaniesMethod; if ((getListCompaniesMethod = CompanyServiceGrpc.getListCompaniesMethod) == null) { synchronized (CompanyServiceGrpc.class) { if ((getListCompaniesMethod = CompanyServiceGrpc.getListCompaniesMethod) == null) { CompanyServiceGrpc.getListCompaniesMethod = getListCompaniesMethod = io.grpc.MethodDescriptor .<com.google.cloud.talent.v4.ListCompaniesRequest, com.google.cloud.talent.v4.ListCompaniesResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListCompanies")) .setSampledToLocalTracing(true)
// Continuation of the ListCompanies descriptor builder (request/response
// proto marshallers + schema descriptor), followed by the stub factories.
.setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.ListCompaniesRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.talent.v4.ListCompaniesResponse .getDefaultInstance())) .setSchemaDescriptor( new CompanyServiceMethodDescriptorSupplier("ListCompanies")) .build(); } } } return getListCompaniesMethod; } /** Creates a new async stub that supports all call types for the service */ public static CompanyServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<CompanyServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceStub>() { @java.lang.Override public CompanyServiceStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new CompanyServiceStub(channel, callOptions); } }; return CompanyServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static CompanyServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingV2Stub>() { @java.lang.Override public CompanyServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new CompanyServiceBlockingV2Stub(channel, callOptions); } }; return CompanyServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static CompanyServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingStub>() { @java.lang.Override public CompanyServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new
CompanyServiceBlockingStub(channel, callOptions); } }; return CompanyServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static CompanyServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<CompanyServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceFutureStub>() { @java.lang.Override public CompanyServiceFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new CompanyServiceFutureStub(channel, callOptions); } }; return CompanyServiceFutureStub.newStub(factory, channel); } /** * * * <pre> * A service that handles company management, including CRUD and enumeration. * </pre> */ public interface AsyncService { /** * * * <pre> * Creates a new company entity. * </pre> */ default void createCompany( com.google.cloud.talent.v4.CreateCompanyRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getCreateCompanyMethod(), responseObserver); } /** * * * <pre> * Retrieves specified company. * </pre> */ default void getCompany( com.google.cloud.talent.v4.GetCompanyRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetCompanyMethod(), responseObserver); } /** * * * <pre> * Updates specified company. * </pre> */ default void updateCompany( com.google.cloud.talent.v4.UpdateCompanyRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUpdateCompanyMethod(), responseObserver); } /** * * * <pre> * Deletes specified company. * Prerequisite: The company has no jobs associated with it.
* </pre> */ default void deleteCompany( com.google.cloud.talent.v4.DeleteCompanyRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeleteCompanyMethod(), responseObserver); } /** * * * <pre> * Lists all companies associated with the project. * </pre> */ default void listCompanies( com.google.cloud.talent.v4.ListCompaniesRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.ListCompaniesResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListCompaniesMethod(), responseObserver); } } /** * Base class for the server implementation of the service CompanyService. * * <pre> * A service that handles company management, including CRUD and enumeration. * </pre> */ public abstract static class CompanyServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return CompanyServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service CompanyService. * * <pre> * A service that handles company management, including CRUD and enumeration. * </pre> */ public static final class CompanyServiceStub extends io.grpc.stub.AbstractAsyncStub<CompanyServiceStub> { private CompanyServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected CompanyServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new CompanyServiceStub(channel, callOptions); } /** * * * <pre> * Creates a new company entity.
* </pre> */ public void createCompany( com.google.cloud.talent.v4.CreateCompanyRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateCompanyMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Retrieves specified company. * </pre> */ public void getCompany( com.google.cloud.talent.v4.GetCompanyRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetCompanyMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Updates specified company. * </pre> */ public void updateCompany( com.google.cloud.talent.v4.UpdateCompanyRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdateCompanyMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Deletes specified company. * Prerequisite: The company has no jobs associated with it. * </pre> */ public void deleteCompany( com.google.cloud.talent.v4.DeleteCompanyRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteCompanyMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Lists all companies associated with the project. * </pre> */ public void listCompanies( com.google.cloud.talent.v4.ListCompaniesRequest request, io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.ListCompaniesResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListCompaniesMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service CompanyService.
* * <pre> * A service that handles company management, including CRUD and enumeration. * </pre> */ public static final class CompanyServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<CompanyServiceBlockingV2Stub> { private CompanyServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected CompanyServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new CompanyServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Creates a new company entity. * </pre> */ public com.google.cloud.talent.v4.Company createCompany( com.google.cloud.talent.v4.CreateCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves specified company. * </pre> */ public com.google.cloud.talent.v4.Company getCompany( com.google.cloud.talent.v4.GetCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Updates specified company. * </pre> */ public com.google.cloud.talent.v4.Company updateCompany( com.google.cloud.talent.v4.UpdateCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes specified company. * Prerequisite: The company has no jobs associated with it. * </pre> */ public com.google.protobuf.Empty deleteCompany( com.google.cloud.talent.v4.DeleteCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all companies associated with the project.
* </pre> */ public com.google.cloud.talent.v4.ListCompaniesResponse listCompanies( com.google.cloud.talent.v4.ListCompaniesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListCompaniesMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service CompanyService. * * <pre> * A service that handles company management, including CRUD and enumeration. * </pre> */ public static final class CompanyServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<CompanyServiceBlockingStub> { private CompanyServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected CompanyServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new CompanyServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Creates a new company entity. * </pre> */ public com.google.cloud.talent.v4.Company createCompany( com.google.cloud.talent.v4.CreateCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves specified company. * </pre> */ public com.google.cloud.talent.v4.Company getCompany( com.google.cloud.talent.v4.GetCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Updates specified company. * </pre> */ public com.google.cloud.talent.v4.Company updateCompany( com.google.cloud.talent.v4.UpdateCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes specified company. * Prerequisite: The company has no jobs associated with it.
* </pre> */ public com.google.protobuf.Empty deleteCompany( com.google.cloud.talent.v4.DeleteCompanyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteCompanyMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all companies associated with the project. * </pre> */ public com.google.cloud.talent.v4.ListCompaniesResponse listCompanies( com.google.cloud.talent.v4.ListCompaniesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListCompaniesMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service CompanyService. * * <pre> * A service that handles company management, including CRUD and enumeration. * </pre> */ public static final class CompanyServiceFutureStub extends io.grpc.stub.AbstractFutureStub<CompanyServiceFutureStub> { private CompanyServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected CompanyServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new CompanyServiceFutureStub(channel, callOptions); } /** * * * <pre> * Creates a new company entity. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.talent.v4.Company> createCompany(com.google.cloud.talent.v4.CreateCompanyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateCompanyMethod(), getCallOptions()), request); } /** * * * <pre> * Retrieves specified company. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.talent.v4.Company> getCompany(com.google.cloud.talent.v4.GetCompanyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetCompanyMethod(), getCallOptions()), request); } /** * * * <pre> * Updates specified company.
* </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.talent.v4.Company> updateCompany(com.google.cloud.talent.v4.UpdateCompanyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdateCompanyMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes specified company. * Prerequisite: The company has no jobs associated with it. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteCompany(com.google.cloud.talent.v4.DeleteCompanyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteCompanyMethod(), getCallOptions()), request); } /** * * * <pre> * Lists all companies associated with the project. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.talent.v4.ListCompaniesResponse> listCompanies(com.google.cloud.talent.v4.ListCompaniesRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListCompaniesMethod(), getCallOptions()), request); } } private static final int METHODID_CREATE_COMPANY = 0; private static final int METHODID_GET_COMPANY = 1; private static final int METHODID_UPDATE_COMPANY = 2; private static final int METHODID_DELETE_COMPANY = 3; private static final int METHODID_LIST_COMPANIES = 4; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch
// Unary dispatch: each METHODID_* constant routes to the matching
// AsyncService method; the streaming invoke() overload below always
// throws AssertionError because this service has no streaming RPCs.
(methodId) { case METHODID_CREATE_COMPANY: serviceImpl.createCompany( (com.google.cloud.talent.v4.CreateCompanyRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company>) responseObserver); break; case METHODID_GET_COMPANY: serviceImpl.getCompany( (com.google.cloud.talent.v4.GetCompanyRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company>) responseObserver); break; case METHODID_UPDATE_COMPANY: serviceImpl.updateCompany( (com.google.cloud.talent.v4.UpdateCompanyRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.Company>) responseObserver); break; case METHODID_DELETE_COMPANY: serviceImpl.deleteCompany( (com.google.cloud.talent.v4.DeleteCompanyRequest) request, (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver); break; case METHODID_LIST_COMPANIES: serviceImpl.listCompanies( (com.google.cloud.talent.v4.ListCompaniesRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4.ListCompaniesResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getCreateCompanyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.talent.v4.CreateCompanyRequest, com.google.cloud.talent.v4.Company>(service, METHODID_CREATE_COMPANY))) .addMethod( getGetCompanyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.talent.v4.GetCompanyRequest, com.google.cloud.talent.v4.Company>(service, METHODID_GET_COMPANY))) .addMethod( getUpdateCompanyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new
MethodHandlers< com.google.cloud.talent.v4.UpdateCompanyRequest, com.google.cloud.talent.v4.Company>(service, METHODID_UPDATE_COMPANY))) .addMethod( getDeleteCompanyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.talent.v4.DeleteCompanyRequest, com.google.protobuf.Empty>( service, METHODID_DELETE_COMPANY))) .addMethod( getListCompaniesMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.talent.v4.ListCompaniesRequest, com.google.cloud.talent.v4.ListCompaniesResponse>( service, METHODID_LIST_COMPANIES))) .build(); } private abstract static class CompanyServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { CompanyServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.talent.v4.CompanyServiceProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("CompanyService"); } } private static final class CompanyServiceFileDescriptorSupplier extends CompanyServiceBaseDescriptorSupplier { CompanyServiceFileDescriptorSupplier() {} } private static final class CompanyServiceMethodDescriptorSupplier extends CompanyServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; CompanyServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) {
// Double-checked locking: re-test the volatile field under the class
// lock so the ServiceDescriptor is built exactly once.
synchronized (CompanyServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new CompanyServiceFileDescriptorSupplier()) .addMethod(getCreateCompanyMethod()) .addMethod(getGetCompanyMethod()) .addMethod(getUpdateCompanyMethod()) .addMethod(getDeleteCompanyMethod()) .addMethod(getListCompaniesMethod()) .build(); } } } return result; } }
oracle/nosql
35,239
kvtest/kvstore-IT/src/main/java/oracle/kv/impl/api/security/SecureTableOpsTest.java
// ---------------------------------------------------------------------------
// NOTE(review): start of SecureTableOpsTest — authorization tests for table
// data operations in Oracle NoSQL. A "super" admin user owns the test
// tables; a non-admin "test" user is exercised by granting/revoking
// privileges (READ/INSERT/DELETE at table, any-table, and store-wide
// scope) on TEST_ROLE and asserting that operations succeed or are denied.
// The class continues beyond this chunk; this edit adds comments only at
// safe points — several internal line breaks fall inside string literals
// and block comments, so those spans are left byte-identical.
// ---------------------------------------------------------------------------
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2011, 2025 Oracle and/or its affiliates. All rights reserved. * */ package oracle.kv.impl.api.security; import static oracle.kv.impl.security.KVStorePrivilegeLabel.CREATE_ANY_INDEX; import static oracle.kv.impl.security.KVStorePrivilegeLabel.CREATE_ANY_TABLE; import static oracle.kv.impl.security.KVStorePrivilegeLabel.DELETE_ANY_TABLE; import static oracle.kv.impl.security.KVStorePrivilegeLabel.DELETE_TABLE; import static oracle.kv.impl.security.KVStorePrivilegeLabel.INSERT_ANY_TABLE; import static oracle.kv.impl.security.KVStorePrivilegeLabel.INSERT_TABLE; import static oracle.kv.impl.security.KVStorePrivilegeLabel.READ_ANY; import static oracle.kv.impl.security.KVStorePrivilegeLabel.READ_ANY_TABLE; import static oracle.kv.impl.security.KVStorePrivilegeLabel.READ_TABLE; import static oracle.kv.impl.security.KVStorePrivilegeLabel.WRITE_ANY; import static oracle.kv.util.DDLTestUtils.execStatement; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; import java.util.HashMap; import java.util.Map; import oracle.kv.FaultException; import oracle.kv.KVStore; import oracle.kv.Key; import oracle.kv.Value; import oracle.kv.Version; import oracle.kv.LoginCredentials; import oracle.kv.PasswordCredentials; import oracle.kv.ReturnValueVersion; import oracle.kv.StatementResult; import oracle.kv.UnauthorizedException; import oracle.kv.impl.api.KVStoreImpl; import oracle.kv.impl.api.ops.InternalOperation; import oracle.kv.impl.api.table.RowImpl; import oracle.kv.impl.api.table.TableImpl; import oracle.kv.impl.api.table.TableKey; import oracle.kv.impl.security.KVStorePrivilege.PrivilegeType; import
oracle.kv.impl.security.KVStorePrivilegeLabel; import oracle.kv.impl.security.SecureTestBase; import oracle.kv.table.IndexKey; import oracle.kv.table.PrimaryKey; import oracle.kv.table.Row; import oracle.kv.table.Table; import oracle.kv.table.TableAPI; import oracle.kv.table.TimeToLive; import oracle.kv.util.CreateStore.SecureUser; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; /** * Tests the table data operation access check. In this test, we modify the * privileges of the testing user by changing the granted privileges of the * role granted to the testing user. */ public class SecureTableOpsTest extends SecureTestBase { private static final String SUPER_USER = "super"; private static final String SUPER_USER_PWD = "NoSql00__super123"; private static final String TEST_USER = "test"; private static final String TEST_USER_PWD = "NoSql00__test123"; private static final String TEST_TABLE = "test_table"; private static final String TEST_CHILD_TABLE = "test_table.child"; private static final String TEST_TABLE_B = "test_table_b"; private static final String TEST_TTL_TABLE = "test_ttl_table"; private static final String TEST_TABLE_DEF = " (id integer, name string, primary key(id))"; private static final String TEST_CHILD_TABLE_DEF = " (email string, address string, primary key (email))"; private static final String TEST_TABLE_B_DEF = " (id integer, name string, salary integer, primary key(id))"; private static final String TEST_TABLE_INDEX = "test_name"; private static final String TEST_TABLE_INDEX_FIELD = "name"; private static final String TEST_TABLE_ROW_JSON_STR = "{\"id\":1, \"name\":\"jim\"}"; private static final String TEST_CHILD_TABLE_INDEX = "test_child_addr"; private static final String TEST_CHILD_TABLE_INDEX_FIELD = "address"; private static final String TEST_CHILD_TABLE_ROW_JSON_STR = "{\"id\":1, \"email\":\"test@\", \"address\":\"earth\"}"; private static final String TEST_TABLE_B_ROW_JSON_STR = "{\"id\":1, \"name\":\"jim\", 
\"salary\":3000}"; private static final Map<String, String> tableRowMap = new HashMap<String, String>(); static { tableRowMap.put(TEST_CHILD_TABLE, TEST_CHILD_TABLE_ROW_JSON_STR); tableRowMap.put(TEST_TABLE, TEST_TABLE_ROW_JSON_STR); tableRowMap.put(TEST_TTL_TABLE, TEST_TABLE_ROW_JSON_STR); tableRowMap.put(TEST_TABLE_B, TEST_TABLE_B_ROW_JSON_STR); } private static final String TEST_ROLE = "testrole"; private static KVStore superUserStore; private static KVStore testUserStore; @BeforeClass public static void staticSetUp() throws Exception { users.add(new SecureUser(SUPER_USER, SUPER_USER_PWD, true /* admin */)); users.add(new SecureUser(TEST_USER, TEST_USER_PWD, false /* admin */)); startup(); initStores(); prepareTest(); } @AfterClass public static void staticTearDown() throws Exception { closeStores(); shutdown(); } @Override protected void clearTestDirectory() { /* do not clean test directory */ } @Test public void testTableOwnership() throws Exception { /* Add some data to test_table, which is owned by super_user */ Row row = createOneRowForTable(TEST_TABLE); TableAPI testUserTableAPI = testUserStore.getTableAPI(); IndexKey idxKey = row.getTable().getIndex(TEST_TABLE_INDEX).createIndexKey(); /* * Non-owner user could not do any access operation on TEST_TABLE without * explicit grant */ try { populateTableWithOneRow(superUserStore, row); final TableImpl testTableImpl = (TableImpl) testUserTableAPI.getTable(TEST_TABLE); assertNotEquals(testTableImpl.getOwner().name(), TEST_USER); testDeniedInsertOps(testUserStore, row); testDeniedReadOps(testUserStore, row, idxKey); testDeniedDeleteOps(testUserStore, row); } finally { cleanOneRowFromTable(superUserStore, row); } /* * Build another table using account of test_user, and tests that all * access operations are valid even without any grant. 
*/ final String anotherTable = "another_table"; try { grantPrivToRole(TEST_ROLE, CREATE_ANY_TABLE, CREATE_ANY_INDEX); execStatement(testUserStore, "create table " + anotherTable + " (id integer, idxfield string, primary key(id))"); execStatement(testUserStore, "create index idx1 on " + anotherTable + " (idxfield)"); final Table table = testUserTableAPI.getTable(anotherTable); row = table.createRow(); row.put("id", 1); row.put("idxfield", "t1"); idxKey = table.getIndex("idx1").createIndexKey(); testValidInsertOps(row); populateTableWithOneRow(superUserStore, row); testValidReadOps(testUserStore, row, idxKey); testValidDeleteOps(row); } finally { /* clean up */ execStatement(testUserStore, "drop table if exists " + anotherTable); revokePrivFromRole(TEST_ROLE, CREATE_ANY_TABLE, CREATE_ANY_INDEX); } } @Test public void testTableReadPermission() throws Exception { final Row row = createOneRowForTable(TEST_TABLE); final IndexKey idxKey = row.getTable().getIndex(TEST_TABLE_INDEX).createIndexKey(); populateTableWithOneRow(superUserStore, row); try { /* Removed all privileges enabling table read */ revokePrivFromRole(TEST_ROLE, READ_ANY_TABLE, READ_ANY); revokePrivFromRole(TEST_ROLE, TEST_TABLE, READ_TABLE); /* all read ops are denied */ testDeniedReadOps(testUserStore, row, idxKey); /* test read_table */ try { grantPrivToRole(TEST_ROLE, TEST_TABLE, READ_TABLE); testValidReadOps(testUserStore, row, idxKey); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, READ_TABLE); testDeniedReadOps(testUserStore, row, idxKey); } /* test read_any_table */ try { grantPrivToRole(TEST_ROLE, READ_ANY_TABLE); testValidReadOps(testUserStore, row, idxKey); } finally { revokePrivFromRole(TEST_ROLE, READ_ANY_TABLE); testDeniedReadOps(testUserStore, row, idxKey); } /* test read_any */ try { grantPrivToRole(TEST_ROLE, READ_ANY); testValidReadOps(testUserStore, row, idxKey); } finally { revokePrivFromRole(TEST_ROLE, READ_ANY); testDeniedReadOps(testUserStore, row, idxKey); } } finally {
// Cleanup: always remove the row the super user inserted, regardless of
// the outcome of the privilege grant/revoke checks above.
cleanOneRowFromTable(superUserStore, row); } } @Test public void testTableInsertPermission() throws Exception { final Row row = createOneRowForTable(TEST_TABLE); /* Removed all privileges enabling table insert */ revokePrivFromRole(TEST_ROLE, WRITE_ANY, INSERT_ANY_TABLE); revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); /* all read ops are denied */ testDeniedInsertOps(testUserStore, row); /* test insert_table */ try { grantPrivToRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); } /* test insert_any_table */ try { grantPrivToRole(TEST_ROLE, INSERT_ANY_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, INSERT_ANY_TABLE); testDeniedInsertOps(testUserStore, row); } /* test write_any */ try { grantPrivToRole(TEST_ROLE, WRITE_ANY); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, WRITE_ANY); testDeniedInsertOps(testUserStore, row); } } @Test public void testTableDeletePermission() throws Exception { final Row row = createOneRowForTable(TEST_TABLE); populateTableWithOneRow(superUserStore, row); try { /* Removed all privileges enabling table delete */ revokePrivFromRole(TEST_ROLE, WRITE_ANY, DELETE_ANY_TABLE); revokePrivFromRole(TEST_ROLE, TEST_TABLE, DELETE_TABLE); /* all delete ops are denied */ testDeniedDeleteOps(testUserStore, row); /* test delete_table */ try { grantPrivToRole(TEST_ROLE, TEST_TABLE, DELETE_TABLE); testValidDeleteOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, DELETE_TABLE); testDeniedDeleteOps(testUserStore, row); } /* test delete_any_table */ try { grantPrivToRole(TEST_ROLE, DELETE_ANY_TABLE); testValidDeleteOps(row); } finally { revokePrivFromRole(TEST_ROLE, DELETE_ANY_TABLE); testDeniedDeleteOps(testUserStore, row); } /* test write_any */ try { grantPrivToRole(TEST_ROLE, WRITE_ANY); testValidDeleteOps(row); } finally { revokePrivFromRole(TEST_ROLE, 
WRITE_ANY); testDeniedDeleteOps(testUserStore, row); } } finally { cleanOneRowFromTable(superUserStore, row); } } @Test public void testPrivOnParentAndChildTable() throws Exception { final Row row = createOneRowForTable(TEST_CHILD_TABLE); final IndexKey idxKey = row.getTable().getIndex(TEST_CHILD_TABLE_INDEX).createIndexKey(); /* Test table read */ revokePrivFromRole(TEST_ROLE, READ_ANY, READ_ANY_TABLE); revokePrivFromRole(TEST_ROLE, TEST_CHILD_TABLE, READ_TABLE); try { populateTableWithOneRow(superUserStore, row); /* Read privilege on parent table does not apply to child tables */ grantPrivToRole(TEST_ROLE, TEST_TABLE, READ_TABLE); testDeniedReadOps(testUserStore, row, idxKey); /* * Should be OK to read child table with read privileges on both * parent and child tables */ grantPrivToRole(TEST_ROLE, TEST_CHILD_TABLE, READ_TABLE); testValidReadOps(testUserStore, row, idxKey); /* * Read privilege on only child table is not sufficient to read * the child table */ revokePrivFromRole(TEST_ROLE, TEST_TABLE, READ_TABLE); grantPrivToRole(TEST_ROLE, TEST_CHILD_TABLE, READ_TABLE); testDeniedReadOps(testUserStore, row, idxKey); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, READ_TABLE); revokePrivFromRole(TEST_ROLE, TEST_CHILD_TABLE, READ_TABLE); cleanOneRowFromTable(superUserStore, row); } /* Test table insert */ revokePrivFromRole(TEST_ROLE, WRITE_ANY, INSERT_ANY_TABLE); revokePrivFromRole(TEST_ROLE, TEST_CHILD_TABLE, INSERT_TABLE); try { /* Insert privilege on parent table does not apply to child table */ grantPrivToRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); /* Should be OK with insert on both child and parent table */ grantPrivToRole(TEST_ROLE, TEST_CHILD_TABLE, INSERT_TABLE); testValidInsertOps(row); /* Insert priv on only child table is not sufficient */ revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); /* Should be OK with read on parent table */ grantPrivToRole(TEST_ROLE, 
TEST_TABLE, READ_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE, READ_TABLE); revokePrivFromRole(TEST_ROLE, TEST_CHILD_TABLE, INSERT_TABLE); } revokePrivFromRole(TEST_ROLE, DELETE_ANY_TABLE); revokePrivFromRole(TEST_ROLE, TEST_CHILD_TABLE, DELETE_TABLE); try { populateTableWithOneRow(superUserStore, row); /* Delete privilege on parent table does not apply to child table*/ grantPrivToRole(TEST_ROLE, TEST_TABLE, DELETE_TABLE); testDeniedDeleteOps(testUserStore, row); /* Should be OK with delete priv on both child and parent table */ grantPrivToRole(TEST_ROLE, TEST_CHILD_TABLE, DELETE_TABLE); testValidDeleteOps(row); /* Delete priv on only child table is not sufficient */ revokePrivFromRole(TEST_ROLE, TEST_TABLE, DELETE_TABLE); testDeniedDeleteOps(testUserStore, row); /* Should be OK with read priv on parent table */ grantPrivToRole(TEST_ROLE, TEST_TABLE, READ_TABLE); testValidDeleteOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, DELETE_TABLE, READ_TABLE); cleanOneRowFromTable(superUserStore, row); } } @Test public void testBypassTableAccessCheck() throws Exception { try { grantPrivToRole(TEST_ROLE, TEST_TABLE, READ_TABLE, INSERT_TABLE, DELETE_TABLE); final Table testTable = testUserStore.getTableAPI().getTable(TEST_TABLE); final Table testTableB = testUserStore.getTableAPI().getTable(TEST_TABLE_B); assertNotNull(testTableB); final String rowJson = tableRowMap.get(TEST_TABLE_B); assertNotNull(rowJson); /* Build a row using test table B */ final RowImpl row = (RowImpl)testTableB.createRowFromJson(rowJson, true); /* Identify id of test table */ final long testTableId = ((TableImpl) testTable).getId(); final KVStoreImpl storeImpl = (KVStoreImpl) testUserStore; /* * Exercise internal single-operation execution methods, * each method pass id of test table that test user have permission * to access but key in test table B. 
*/ final DeniedOp[] ops = new DeniedOp[] { new DeniedOp() { @Override public void execute() throws FaultException { doPutOp(storeImpl, InternalOperation.OpCode.PUT, row.getPrimaryKey(false), row.createValue(), null, testTableId); } }, new DeniedOp() { @Override public void execute() throws FaultException { doPutOp(storeImpl, InternalOperation.OpCode.PUT_IF_ABSENT, row.getPrimaryKey(false), row.createValue(), null, testTableId); } }, new DeniedOp() { @Override public void execute() throws FaultException { doPutOp(storeImpl, InternalOperation.OpCode.PUT_IF_VERSION, row.getPrimaryKey(false), row.createValue(), OpAccessCheckTestUtils.dummyVer, testTableId); } }, new DeniedOp() { @Override public void execute() throws FaultException { storeImpl.getInternal(row.getPrimaryKey(false), testTableId, null /* Consistency */, 0 /* Timeout */, null /* Timeout unit */); } }, new DeniedOp() { @Override public void execute() throws FaultException { storeImpl.deleteInternal(row.getPrimaryKey(false), null /* ReturnValueVersion */, null /* Durability */, 0 /* Timeout */, null /* Timeout Unit*/, testTableId); } }, new DeniedOp() { @Override public void execute() throws FaultException { storeImpl.deleteIfVersionInternal( row.getPrimaryKey(false), OpAccessCheckTestUtils.dummyVer, null /* ReturnValueVersion */, null /* Durability */, 0 /* Timeout */, null /* Timeout Unit*/, testTableId); } } }; new DeniedOpExecution() { @Override void assertException(UnauthorizedException uae) { tableIdMismatchFailure(uae); } }.perform(ops); /* * Drop test table b so that the key passed in cannot identify * a table */ execStatement(superUserStore, "drop table " + TEST_TABLE_B); new DeniedOpExecution() { @Override void assertException(UnauthorizedException uae) { tableNotFoundFailure(uae); } }.perform(ops); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, READ_TABLE, INSERT_TABLE, DELETE_TABLE); } } private void doPutOp(KVStoreImpl storeImpl, InternalOperation.OpCode op, Key key, Value value, Version 
version, long tableId) { storeImpl.doPutInternal( storeImpl.makePutRequest(op, key, value, ReturnValueVersion.Choice.NONE, tableId, null, 0, null, null, false, version), null); } @Test public void testTTLAccessControl() throws Exception { Row row = createOneRowForTable(TEST_TTL_TABLE); /* Removed all privileges enabling table insert */ revokePrivFromRole(TEST_ROLE, WRITE_ANY, INSERT_ANY_TABLE, DELETE_ANY_TABLE); revokePrivFromRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE, DELETE_TABLE); revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE, DELETE_TABLE); /* all insert ops are denied */ testDeniedInsertOps(testUserStore, row); /* * test insert_table only, all insert ops against a table * having ttl defined are denied */ try { grantPrivToRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE); testDeniedTTLInsertOps(testUserStore, row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table only, all insert ops with explicitly set ttl * as zero against a table having default ttl defined are passed */ row.setTTL(TimeToLive.DO_NOT_EXPIRE); try { grantPrivToRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table and delete_table, all insert ops against a table * having ttl defined are passed */ row = createOneRowForTable(TEST_TTL_TABLE); try { grantPrivToRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE, DELETE_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE, DELETE_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table only, try perform inserts with explicitly TTL * setting against a table having TTL default defined. 
row.setTTL(TimeToLive.ofDays(10)); try { grantPrivToRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE); testDeniedTTLInsertOps(testUserStore, row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table and delete_table, try perform inserts with * explicitly TTL setting against a table having TTL default passed. */ try { grantPrivToRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE, DELETE_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TTL_TABLE, INSERT_TABLE, DELETE_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table only, try perform inserts without TTL setting * against a table not having TTL default defined are passed. */ row = createOneRowForTable(TEST_TABLE); try { grantPrivToRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table only, try perform inserts with TTL as zero setting * against a table not having TTL default defined are passed. */ row.setTTL(TimeToLive.ofDays(0)); try { grantPrivToRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table only, try perform inserts with TTL setting against * a table not having TTL default defined are denied. */ row.setTTL(TimeToLive.ofDays(10)); try { grantPrivToRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testDeniedTTLInsertOps(testUserStore, row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE); testDeniedInsertOps(testUserStore, row); } /* * test insert_table and delete_table only, try perform inserts with * TTL setting against a table not having TTL default defined are passed. 
*/ try { grantPrivToRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE, DELETE_TABLE); testValidInsertOps(row); } finally { revokePrivFromRole(TEST_ROLE, TEST_TABLE, INSERT_TABLE, DELETE_TABLE); testDeniedInsertOps(testUserStore, row); } } private void testValidInsertOps(Row row) throws Exception { final Key kvKey = TableKey.createKey(row.getTable(), row, false).getKey(); OpAccessCheckTestUtils.testValidTableInsertOps( testUserStore, superUserStore, row); OpAccessCheckTestUtils.testValidPutOps( testUserStore, superUserStore, kvKey, getKVValueFromRow(row)); } private void testValidDeleteOps(Row row) throws Exception { final PrimaryKey key = row.createPrimaryKey(); final Key kvKey = TableKey.createKey(row.getTable(), row, false).getKey(); OpAccessCheckTestUtils.testValidTableDeleteOps( testUserStore, superUserStore, key); OpAccessCheckTestUtils.testValidDeleteOps( testUserStore, superUserStore, kvKey, getKVValueFromRow(row)); } private void testDeniedTTLInsertOps(KVStore store, Row row) throws Exception { OpAccessCheckTestUtils.testDeniedTableInsertOps(store, row); /* * Using KV API to access keyspace of a table having ttl defined, * there would be no valid ttl specified, so these operations do * not need delete_table privilege. */ final Key kvKey = TableKey.createKey(row.getTable(), row, false).getKey(); OpAccessCheckTestUtils.testValidPutOps( testUserStore, superUserStore, kvKey, getKVValueFromRow(row)); } /* * Prepares test tables and roles. 
*/ private static void prepareTest() throws Exception { execStatement(superUserStore, "create table " + TEST_TABLE + TEST_TABLE_DEF); execStatement(superUserStore, "create index " + TEST_TABLE_INDEX + " on " + TEST_TABLE + " (" + TEST_TABLE_INDEX_FIELD + ")"); execStatement(superUserStore, "create table " + TEST_CHILD_TABLE + TEST_CHILD_TABLE_DEF); execStatement(superUserStore, "create index " + TEST_CHILD_TABLE_INDEX + " on " + TEST_CHILD_TABLE + " (" + TEST_CHILD_TABLE_INDEX_FIELD + ")"); execStatement(superUserStore, "create table " + TEST_TABLE_B + TEST_TABLE_B_DEF); execStatement(superUserStore, "create table " + TEST_TTL_TABLE + TEST_TABLE_DEF + " using ttl 5 days"); execStatement(superUserStore, "create role testrole"); execStatement(superUserStore, "grant " + TEST_ROLE + " to user " + TEST_USER); execStatement(superUserStore, "grant readwrite to user " + SUPER_USER); } /* Creates a row for test table */ private static Row createOneRowForTable(String table) { final Table testTable = superUserStore.getTableAPI().getTable(table); assertNotNull(testTable); final String rowJson = tableRowMap.get(table); assertNotNull(rowJson); return testTable.createRowFromJson(rowJson, true); } private static void populateTableWithOneRow(KVStore store, Row row) { assertNotNull(store.getTableAPI().put(row, null, null)); } private static void cleanOneRowFromTable(KVStore store, Row row) { final PrimaryKey key = row.createPrimaryKey(); store.getTableAPI().delete(key, null, null); } private static void revokePrivFromRole(String role, KVStorePrivilegeLabel... sysPriv) throws Exception { for (KVStorePrivilegeLabel label : sysPriv) { execStatement(superUserStore, "revoke " + label + " from " + role); assertRoleHasNoPriv(role, label.toString()); } } private static void revokePrivFromRole(String role, String table, KVStorePrivilegeLabel... 
tablePriv) throws Exception { for (KVStorePrivilegeLabel label : tablePriv) { execStatement(superUserStore, "revoke " + label + " on " + table + " from " + role); assertRoleHasNoPriv(role, toTablePrivStr(label, table)); } } private static void grantPrivToRole(String role, KVStorePrivilegeLabel... sysPriv) throws Exception { for (KVStorePrivilegeLabel label : sysPriv) { execStatement(superUserStore, "grant " + label + " to " + role); assertRoleHasPriv(role, label.toString()); } } private static void grantPrivToRole(String role, String table, KVStorePrivilegeLabel... tablePriv) throws Exception { for (KVStorePrivilegeLabel label : tablePriv) { execStatement(superUserStore, "grant " + label + " on " + table + " to " + role); assertRoleHasPriv(role, toTablePrivStr(label, table)); } } private static void assertRoleHasNoPriv(String role, String privStr) { final StatementResult result = superUserStore.executeSync("show role " + role); assertThat(result.getResult(), not(containsString(privStr))); } private static void assertRoleHasPriv(String role, String privStr) { final StatementResult result = superUserStore.executeSync("show role " + role); assertThat(result.getResult(), containsString(privStr)); } private static String toTablePrivStr(KVStorePrivilegeLabel tablePriv, String table) { assertEquals(tablePriv.getType(), PrivilegeType.TABLE); return String.format("%s(%s)", tablePriv, table); } /* Keep this method around in case we need it some time */ @SuppressWarnings("unused") private static String toNamespacePrivStr(KVStorePrivilegeLabel nsPriv, String namespace) { assertEquals(nsPriv.getType(), PrivilegeType.NAMESPACE); return String.format("%s(%s)", nsPriv, namespace); } private static void initStores() { try { final LoginCredentials superCreds = new PasswordCredentials(SUPER_USER, SUPER_USER_PWD.toCharArray()); final LoginCredentials testCreds = new PasswordCredentials(TEST_USER, TEST_USER_PWD.toCharArray()); superUserStore = createStore.getSecureStore(superCreds); 
testUserStore = createStore.getSecureStore(testCreds); } catch (Exception e) { fail("unexpected exception in user login: " + e); } } private static void closeStores() { if (superUserStore != null) { superUserStore.close(); } if (testUserStore != null) { testUserStore.close(); } } private static void tableIdMismatchFailure(UnauthorizedException fe) { assertThat("table id not match", fe.getMessage(), containsString("differs from table id in key")); } private static void tableNotFoundFailure(UnauthorizedException uae) { assertThat("table not found from key", uae.getMessage(), containsString("Key does not identify a table")); } private interface DeniedOp { void execute() throws FaultException; } private static abstract class DeniedOpExecution { void perform(DeniedOp... ops) { for (DeniedOp op : ops) { try { op.execute(); fail("Expected UnauthorizedException"); } catch (UnauthorizedException fe) { assertException(fe); } } } abstract void assertException(UnauthorizedException fe); } }
oracle/coherence
35,177
prj/coherence-core/src/main/java/com/tangosol/util/LiteSet.java
/* * Copyright (c) 2000, 2022, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ package com.tangosol.util; import com.tangosol.io.ExternalizableLite; import java.io.DataInput; import java.io.DataOutput; import java.io.Externalizable; import java.io.IOException; import java.io.NotActiveException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.lang.reflect.Array; import java.util.AbstractSet; import java.util.Collection; import java.util.Enumeration; import java.util.HashSet; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Set; /** * An implementation of java.util.Set that is optimal (in terms of both size * and speed) for very small sets of data but still works excellently with * large sets of data. This implementation is not thread-safe. * <p> * The LiteSet implementation switches at runtime between several different * sub-implementations for storing the set of objects, described here: * <ol> * <li>"empty set" - a set that contains no data; * <li>"single entry" - a reference directly to an item is used to represent * a set with exactly one item in it; * <li>"Object[]" - a reference is held to an array of Objects that store * the contents of the Set; the item limit for this implementation is * determined by the THRESHOLD constant; * <li>"delegation" - for more than THRESHOLD items, a set is created to * delegate the set management to; sub-classes can override the default * delegation class (java.util.HashSet) by overriding the factory method * {@link #instantiateSet() instantiateSet()}. * </ol> * <p> * The LiteSet implementation supports the null value. 
* * @author cp 06/02/99 */ public class LiteSet<E> extends AbstractSet<E> implements Cloneable, Externalizable, ExternalizableLite { // ----- constructors --------------------------------------------------- /** * Construct a LiteSet */ public LiteSet() { } /** * Construct a LiteSet containing the elements of the passed Collection. * * @param collection a Collection */ public LiteSet(Collection<? extends E> collection) { addAll(collection); } // ----- Set interface -------------------------------------------------- /** * Determine if this Set is empty. * * @return true iff this Set is empty */ public boolean isEmpty() { return m_nImpl == I_EMPTY; } /** * Returns the number of elements in this Set (its cardinality). * * @return the number of elements in this Set */ public int size() { switch (m_nImpl) { case I_EMPTY: return 0; case I_SINGLE: return 1; case I_ARRAY_1: case I_ARRAY_2: case I_ARRAY_3: case I_ARRAY_4: case I_ARRAY_5: case I_ARRAY_6: case I_ARRAY_7: case I_ARRAY_8: return m_nImpl - I_ARRAY_1 + 1; case I_OTHER: return ((Set<E>) m_oContents).size(); default: throw new IllegalStateException(); } } /** * Returns <tt>true</tt> if this Set contains the specified element. More * formally, returns <tt>true</tt> if and only if this Set contains an * element <code>e</code> such that * <code>(o==null ? e==null : o.equals(e))</code>. 
* * @param o the object to check for * * @return <tt>true</tt> if this Set contains the specified element */ public boolean contains(Object o) { switch (m_nImpl) { case I_EMPTY: return false; case I_SINGLE: return Base.equals(o, m_oContents); case I_ARRAY_1: case I_ARRAY_2: case I_ARRAY_3: case I_ARRAY_4: case I_ARRAY_5: case I_ARRAY_6: case I_ARRAY_7: case I_ARRAY_8: { // "Object[]" implementation Object[] ao = (Object[]) m_oContents; int c = m_nImpl - I_ARRAY_1 + 1; return indexOf(ao, c, o) >= 0; } case I_OTHER: return ((Set<E>) m_oContents).contains(o); default: throw new IllegalStateException(); } } /** * Returns an Iterator over the elements in this Set. The elements are * returned in an arbitrary order. * * @return an iterator over the elements in this Set */ public Iterator<E> iterator() { return isEmpty() ? NullImplementation.getIterator() : new Iterator<E>() { /** * Returns <tt>true</tt> if the iteration has more elements. (In * other words, returns <tt>true</tt> if <tt>next</tt> would * return an element rather than throwing an exception.) * * @return <tt>true</tt> if the iterator has more elements */ public boolean hasNext() { return (m_iPrev + 1 < m_aVals.length); } /** * Returns the next element in the iteration. * * @return the next element in the iteration * * @exception NoSuchElementException iteration has no more * elements */ public E next() { int iNext = m_iPrev + 1; if (iNext < m_aVals.length) { m_iPrev = iNext; m_fCanRemove = true; return (E) m_aVals[iNext]; } else { throw new NoSuchElementException(); } } /** * Removes from the underlying set the last element * returned by the iterator. This method can be called only once * per call to <tt>next</tt>. The behavior of an iterator is * unspecified if the underlying set is modified while the * iteration is in progress in any way other than by calling this * method. 
* * @exception IllegalStateException if the <tt>next</tt> method * has not yet been called, or the <tt>remove</tt> * method has already been called after the last call * to the <tt>next</tt> method */ public void remove() { if (m_fCanRemove) { m_fCanRemove = false; LiteSet.this.remove(m_aVals[m_iPrev]); } else { throw new IllegalStateException(); } } Object[] m_aVals = LiteSet.this.toArray(); int m_iPrev = -1; boolean m_fCanRemove = false; }; } /** * Returns an Enumerator over the elements in this Set. The elements are * returned in an arbitrary order. * * @return an Enumerator over the elements in this Set */ public Enumeration<E> elements() { return isEmpty() ? NullImplementation.getEnumeration() : new Enumeration<E>() { /** * Returns <tt>true</tt> if the Enumeration has more elements. (In * other words, returns <tt>true</tt> if <tt>nextElement</tt> * would return an element rather than throwing an exception.) * * @return <tt>true</tt> if the Enumeration has more elements */ public boolean hasMoreElements() { return (m_iNext < m_aVals.length); } /** * Returns the next element in the Enumeration. * * @return the next element in the Enumeration * * @exception NoSuchElementException Enumeration has no more * elements */ public E nextElement() { if (m_iNext < m_aVals.length) { return (E) m_aVals[m_iNext++]; } else { throw new NoSuchElementException(); } } Object[] m_aVals = LiteSet.this.toArray(); int m_iNext = 0; }; } /** * Returns an array containing all of the elements in this Set. Obeys the * general contract of the <tt>Set.toArray</tt> method. 
* * @return an array containing all of the elements in this Set */ public Object[] toArray() { switch (m_nImpl) { case I_EMPTY: return NO_OBJECTS; case I_SINGLE: return new Object[] {m_oContents}; case I_ARRAY_1: case I_ARRAY_2: case I_ARRAY_3: case I_ARRAY_4: case I_ARRAY_5: case I_ARRAY_6: case I_ARRAY_7: { // "Object[]" implementation Object[] ao = (Object[]) m_oContents; int c = m_nImpl - I_ARRAY_1 + 1; Object[] aoResult = new Object[c]; System.arraycopy(ao, 0, aoResult, 0, c); return aoResult; } case I_ARRAY_8: return (Object[]) ((Object[]) m_oContents).clone(); case I_OTHER: return ((Set) m_oContents).toArray(); default: throw new IllegalStateException(); } } /** * Returns an array (whose runtime type is that of the specified array) * containing all of the elements in this Set. Obeys the general contract * of the <tt>Set.toArray(Object[])</tt> method. * * @param aDest the array into which the elements of this Set are to be * stored, if it is big enough; otherwise, a new array of * the same runtime type is allocated for this purpose * * @return an array containing the elements of this Set * * @throws ArrayStoreException if the component type of <tt>aDest</tt> is * not a supertype of the type of every element in this Set */ public Object[] toArray(Object aDest[]) { if (m_nImpl == I_OTHER) { return ((Set) m_oContents).toArray(aDest); } Object[] aSrc = toArray(); // not optimal, but easy int cSrc = aSrc.length; int cDest = aDest.length; if (cDest < cSrc) { cDest = cSrc; aDest = (Object[]) Array.newInstance( aDest.getClass().getComponentType(), cDest); } if (cSrc > 0) { System.arraycopy(aSrc, 0, aDest, 0, cSrc); } if (cDest > cSrc) { aDest[cSrc] = null; } return aDest; } /** * Ensures that this Set contains the specified element. Returns * <tt>true</tt> if the Set changed as a result of the call. (Returns * <tt>false</tt> if this Set already contains the specified element.) 
* * @param o element to be added to this Set * * @return <tt>true</tt> if this Set did not already contain the * specified element */ public boolean add(E o) { switch (m_nImpl) { case I_EMPTY: // growing from an empty set to the "single entry" // implementation m_nImpl = I_SINGLE; m_oContents = o; return true; case I_SINGLE: { // check if this set already contains the object Object oContents = m_oContents; if (Base.equals(o, oContents)) { return false; } // growing from a "single entry" set to an "Object[]" // implementation Object[] ao = new Object[THRESHOLD]; ao[0] = oContents; ao[1] = o; m_nImpl = I_ARRAY_2; m_oContents = ao; return true; } case I_ARRAY_1: case I_ARRAY_2: case I_ARRAY_3: case I_ARRAY_4: case I_ARRAY_5: case I_ARRAY_6: case I_ARRAY_7: case I_ARRAY_8: { // "Object[]" implementation int nImpl = m_nImpl; Object[] ao = (Object[]) m_oContents; int c = nImpl - I_ARRAY_1 + 1; if (indexOf(ao, c, o) >= 0) { return false; } // check if adding the object exceeds the "lite" threshold if (c >= THRESHOLD) { // time to switch to a different set implementation Set set = instantiateSet(); set.addAll(this); set.add(o); m_nImpl = I_OTHER; m_oContents = set; } else { // use the next available element in the array ao[c] = o; m_nImpl = (byte) (nImpl + 1); } return true; } case I_OTHER: return ((Set<E>) m_oContents).add(o); default: throw new IllegalStateException(); } } /** * Removes the specified element from this Set if it is present. More * formally, removes an element <code>e</code> such that * <code>(o==null ? e==null : o.equals(e))</code>, if the Set contains * such an element. Returns <tt>true</tt> if the Set contained the * specified element (or equivalently, if the Set changed as a result of * the call). The Set will not contain the specified element once the call * returns. 
* * @param o object to be removed from this Set, if present * * @return true if the Set contained the specified element */ public boolean remove(Object o) { switch (m_nImpl) { case I_EMPTY: return false; case I_SINGLE: { if (Base.equals(o, m_oContents)) { // shrink to an "empty set" m_nImpl = I_EMPTY; m_oContents = null; return true; } } return false; case I_ARRAY_1: case I_ARRAY_2: case I_ARRAY_3: case I_ARRAY_4: case I_ARRAY_5: case I_ARRAY_6: case I_ARRAY_7: case I_ARRAY_8: { // "Object[]" implementation int nImpl = m_nImpl; Object[] ao = (Object[]) m_oContents; int c = nImpl - I_ARRAY_1 + 1; int i = indexOf(ao, c, o); if (i < 0) { return false; } if (c == 1) { m_nImpl = I_EMPTY; m_oContents = null; } else { System.arraycopy(ao, i + 1, ao, i, c - i - 1); ao[c-1] = null; m_nImpl = (byte) --nImpl; } return true; } case I_OTHER: { Set set = (Set) m_oContents; boolean fRemoved = set.remove(o); if (fRemoved) { checkShrinkFromOther(); } return fRemoved; } default: throw new IllegalStateException(); } } /** * Returns <tt>true</tt> if this Set contains all of the elements in the * specified Collection. * * @param collection Collection to be checked for containment in this * Set * * @return <tt>true</tt> if this Set contains all of the elements in the * specified Collection */ public boolean containsAll(Collection<?> collection) { switch (m_nImpl) { case I_EMPTY: // since this set is empty, so must the other be return collection.isEmpty(); case I_OTHER: // (assume the delegatee is more efficient) return ((Set<E>) m_oContents).containsAll(collection); default: return super.containsAll(collection); } } /** * Adds all of the elements in the specified Collection to this Set * if they are not already present. If the specified Collection is also a * Set, the <tt>addAll</tt> operation effectively modifies this Set so * that its value is the <i>union</i> of the two Sets. 
* * @param collection Collection whose elements are to be added to this * Set * * @return <tt>true</tt> if this Set changed as a result of the call */ public boolean addAll(Collection<? extends E> collection) { switch (m_nImpl) { case I_EMPTY: // "empty set" implementation (adding all to nothing is easy) { int c = collection.size(); switch (c) { case 0: return false; case 1: { // growing from an empty set to the "single entry" // implementation m_nImpl = I_SINGLE; m_oContents = collection.iterator().next(); } return true; default: return super.addAll(collection); } } case I_OTHER: // (assume the delegatee is more efficient) return ((Set<E>) m_oContents).addAll(collection); default: return super.addAll(collection); } } /** * Retains only the elements in this Set that are contained in the * specified Collection. In other words, removes from this Set all of its * elements that are not contained in the specified Collection. If the * specified Collection is also a Set, this operation effectively modifies * this Set so that its value is the <i>intersection</i> of the two Sets. * * @param collection collection that defines which elements this Set will * retain * * @return <tt>true</tt> if this Set changed as a result of the call */ public boolean retainAll(Collection<?> collection) { switch (m_nImpl) { case I_EMPTY: return false; case I_OTHER: // (assume the delegatee is more efficient) { boolean fChanged = ((Set<E>) m_oContents).retainAll(collection); if (fChanged) { checkShrinkFromOther(); } return fChanged; } default: return super.retainAll(collection); } } /** * Removes from this Set all of its elements that are contained in the * specified Collection. If the specified Collection is also a Set, this * operation effectively modifies this Set so that its value is the * <i>asymmetric set difference</i> of the two Sets. 
* * @param collection Collection that defines which elements will be * removed from this Set * * @return <tt>true</tt> if this Set changed as a result of the call */ public boolean removeAll(Collection<?> collection) { switch (m_nImpl) { case I_EMPTY: return false; case I_OTHER: // (assume the delegatee is more efficient) { boolean fChanged = ((Set<E>) m_oContents).removeAll(collection); if (fChanged) { checkShrinkFromOther(); } return fChanged; } default: return super.removeAll(collection); } } /** * Removes all of the elements from this Set. This Set will be empty after * this call returns. */ public void clear() { m_nImpl = I_EMPTY; m_oContents = null; } // ----- Cloneable interface -------------------------------------------- /** * Create a clone of this Set. * * @return a clone of this Set */ public Object clone() { LiteSet that; try { that = (LiteSet) super.clone(); } catch (CloneNotSupportedException e) { throw Base.ensureRuntimeException(e); } switch (this.m_nImpl) { case I_EMPTY: case I_SINGLE: // nothing to do break; case I_ARRAY_1: case I_ARRAY_2: case I_ARRAY_3: case I_ARRAY_4: case I_ARRAY_5: case I_ARRAY_6: case I_ARRAY_7: case I_ARRAY_8: that.m_oContents = ((Object[]) this.m_oContents).clone(); break; case I_OTHER: Set setThis = (Set) this.m_oContents; Set setThat = that.instantiateSet(); setThat.addAll(setThis); that.m_oContents = setThat; break; default: throw new IllegalStateException(); } return that; } // ----- Externalizable interface --------------------------------------- /** * Initialize this object from the data in the passed ObjectInput stream. 
* * @param in the stream to read data from in order to restore the object * * @exception IOException if an I/O exception occurs */ public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { if (!isEmpty()) { throw new NotActiveException(); } int c = in.readInt(); if (c > 0) { initFromArray((Object[]) in.readObject(), c); } } /** * Write this object's data to the passed ObjectOutput stream. * * @param out the stream to write the object to * * @exception IOException if an I/O exception occurs */ public synchronized void writeExternal(ObjectOutput out) throws IOException { // format is int size followed by (if size > 0) an array of values; // note that the array size does not have to equal the Set size int nImpl = m_nImpl; switch (nImpl) { case I_EMPTY: out.writeInt(0); break; case I_SINGLE: out.writeInt(1); out.writeObject(new Object[] {m_oContents}); break; case I_ARRAY_1: case I_ARRAY_2: case I_ARRAY_3: case I_ARRAY_4: case I_ARRAY_5: case I_ARRAY_6: case I_ARRAY_7: case I_ARRAY_8: out.writeInt(nImpl - I_ARRAY_1 + 1); out.writeObject((Object[]) m_oContents); break; case I_OTHER: Object[] ao = ((Set) m_oContents).toArray(); out.writeInt(ao.length); out.writeObject(ao); break; default: throw new IllegalStateException(); } } // ----- ExternalizableLite interface ----------------------------------- /** * {@inheritDoc} */ public void readExternal(DataInput in) throws IOException { if (!isEmpty()) { throw new NotActiveException(); } boolean fLite = in.readBoolean(); if (fLite) { readAndInitObjectArray(in); } else { Object[] ao = (Object[]) ExternalizableHelper.readObject(in); initFromArray(ao, ao.length); } } /** * {@inheritDoc} */ public synchronized void writeExternal(DataOutput out) throws IOException { // scan through the contents searching for anything that cannot be // streamed to a DataOutput (i.e. 
anything that requires Java Object // serialization); note that the toArray() also resolves concerns // related to the synchronization of the data structure itself during // serialization boolean fLite = true; Object[] ao = toArray(); int c = ao.length; final int FMT_OBJ_SER = ExternalizableHelper.FMT_OBJ_SER; for (int i = 0; i < c; ++i) { if (ExternalizableHelper.getStreamFormat(ao[i]) == FMT_OBJ_SER) { fLite = false; break; } } out.writeBoolean(fLite); if (fLite) { ExternalizableHelper.writeInt(out, c); for (int i = 0; i < c; ++i) { ExternalizableHelper.writeObject(out, ao[i]); } } else { ExternalizableHelper.writeObject(out, ao); } } // ----- internal methods ----------------------------------------------- /** * Read an array of objects from a DataInput stream and initialize * the internal structures of this set. * * @param in a ObjectInputStream stream to read from * * @throws IOException if an I/O exception occurs * * @since 22.09 */ private void readAndInitObjectArray(DataInput in) throws IOException { int cLength = ExternalizableHelper.readInt(in); int cCap = cLength <= 1 || cLength > THRESHOLD ? cLength : THRESHOLD; // JEP-290 - ensure we can allocate this array ExternalizableHelper.validateLoadArray(Object[].class, cCap, in); Object[] oa = cCap <= 0 ? new Object[0] : cCap < ExternalizableHelper.CHUNK_THRESHOLD >> 4 ? readObjectArray(in, cCap, cLength) : readLargeObjectArray(in, cCap); initFromArray(oa, cLength); } /** * Read an array of the specified number of objects from a DataInput stream. 
* * @param in a DataInput stream to read from * @param cLength length to read * @param cRead the number of elements to read * * @return an array of objects * * @throws IOException if an I/O exception occurs * * @since 22.09 */ private static Object[] readObjectArray(DataInput in, int cLength, int cRead) throws IOException { Object[] ao = new Object[cLength]; for (int i = 0; i < cRead; i++) { ao[i] = ExternalizableHelper.readObject(in); } return ao; } /** * Read an array of objects with length larger than {@link ExternalizableHelper#CHUNK_THRESHOLD} {@literal >>} 4. * * @param in a DataInput stream to read from * @param cLength length to read * * @return an array of objects * * @throws IOException if an I/O exception occurs * * @since 22.09 */ private static Object[] readLargeObjectArray(DataInput in, int cLength) throws IOException { int cBatchMax = ExternalizableHelper.CHUNK_SIZE >> 4; int cBatch = cLength / cBatchMax + 1; Object[] aMerged = null; int cRead = 0; int cAllocate = cBatchMax; Object[] ao; for (int i = 0; i < cBatch && cRead < cLength; i++) { ao = readObjectArray(in, cAllocate, cAllocate); aMerged = ExternalizableHelper.mergeArray(aMerged, ao); cRead += ao.length; cAllocate = Math.min(cLength - cRead, cBatchMax); } return aMerged; } /** * (Factory pattern) Instantiate a Set object to store items in once * the "lite" threshold has been exceeded. This method permits inheriting * classes to easily override the choice of the Set object. * * @return an instance of Set */ protected Set<E> instantiateSet() { return new HashSet<>(); } /** * Scan up to the first <tt>c</tt> elements of the passed array * <tt>ao</tt> looking for the specified Object <tt>o</tt>. If it is * found, return its position <tt>i</tt> in the array such that * <tt>(0 &lt;= i &lt; c)</tt>. If it is not found, return <tt>-1</tt>. 
* * @param ao the array of objects to search * @param c the number of elements in the array to search * @param o the object to look for * * @return the index of the object, if found; otherwise -1 */ private int indexOf(Object[] ao, int c, Object o) { // first quick-scan by reference for (int i = 0; i < c; ++i) { if (o == ao[i]) { return i; } } // slow scan by equals() if (o != null) { for (int i = 0; i < c; ++i) { if (o.equals(ao[i])) { return i; } } } return -1; } /** * Initialize the contents of this Set from the passed array <tt>ao</tt> * containing <tt>c</tt> values. * * @param ao the array that contains the values to place in this Set * @param c the number of values that will be placed into this Set */ protected void initFromArray(Object[] ao, int c) { switch (c) { case 0: m_oContents = null; m_nImpl = I_EMPTY; break; case 1: m_oContents = ao[0]; m_nImpl = I_SINGLE; break; case 2: case 3: case 4: case 5: case 6: case 7: case 8: if (ao.length != THRESHOLD) { Object[] aoPresize = new Object[THRESHOLD]; System.arraycopy(ao, 0, aoPresize, 0, c); ao = aoPresize; } m_oContents = ao; m_nImpl = (byte) (I_ARRAY_1 + c - 1); break; default: { Set set = instantiateSet(); for (int i = 0; i < c; ++i) { set.add(ao[i]); } m_oContents = set; m_nImpl = I_OTHER; } break; } assert size() == c; } /** * After a mutation operation has reduced the size of an underlying Set, * check if the delegation model should be replaced with a more size- * efficient storage approach, and switch accordingly. 
 */
protected void checkShrinkFromOther() {
    assert m_nImpl == I_OTHER;

    // check if the set is now significantly below the "lite" threshold
    Set set = (Set) m_oContents;
    int c = set.size();
    switch (c) {
    case 0:
        // empty: drop the delegate entirely
        m_nImpl = I_EMPTY;
        m_oContents = null;
        break;

    case 1:
        // single element: hold it directly
        m_nImpl = I_SINGLE;
        m_oContents = set.toArray()[0];
        break;

    case 2:
    case 3:
    case 4: {
        // shrink to "Object[]" implementation
        // toArray(new Object[THRESHOLD]) returns a THRESHOLD-sized array with
        // the c elements first; per Collection.toArray(T[]) the slot at index
        // c is set to null, matching the array-impl storage layout
        Object[] ao = set.toArray(new Object[THRESHOLD]);
        m_nImpl = (byte) (I_ARRAY_1 + c - 1);
        m_oContents = ao;
    }
        break;

    // sizes 5..THRESHOLD deliberately fall out of the switch and stay
    // I_OTHER — presumably hysteresis to avoid flip-flopping between
    // representations near the threshold (TODO confirm)
    }
}

// ----- constants ------------------------------------------------------

/**
 * A constant array of zero size. (This saves having to allocate what
 * should be a constant.)
 */
private static final Object[] NO_OBJECTS = new Object[0];

/**
 * The default point above which the LiteSet delegates to another set
 * implementation.
 */
private static final int THRESHOLD = 8;

/**
 * Implementation: Empty set.
 */
private static final int I_EMPTY = 0;

/**
 * Implementation: Single-item set.
 */
private static final int I_SINGLE = 1;

/**
 * Implementation: Array set of 1 item.
 * (For the array forms, the tag encodes the count: tag = I_ARRAY_1 + n - 1,
 * as assigned in initFromArray/checkShrinkFromOther.)
 */
private static final int I_ARRAY_1 = 2;

/**
 * Implementation: Array set of 2 items.
 */
private static final int I_ARRAY_2 = 3;

/**
 * Implementation: Array set of 3 items.
 */
private static final int I_ARRAY_3 = 4;

/**
 * Implementation: Array set of 4 items.
 */
private static final int I_ARRAY_4 = 5;

/**
 * Implementation: Array set of 5 items.
 */
private static final int I_ARRAY_5 = 6;

/**
 * Implementation: Array set of 6 items.
 */
private static final int I_ARRAY_6 = 7;

/**
 * Implementation: Array set of 7 items.
 */
private static final int I_ARRAY_7 = 8;

/**
 * Implementation: Array set of 8 items.
 */
private static final int I_ARRAY_8 = 9;

/**
 * Implementation: Delegation.
 */
private static final int I_OTHER = 10;

// ----- data members ---------------------------------------------------

/**
 * Implementation, one of I_EMPTY, I_SINGLE, I_ARRAY_* or I_OTHER.
 */
private byte m_nImpl;

/**
 * The set contents, based on the implementation being used:
 * I_EMPTY -&gt; null; I_SINGLE -&gt; the sole element itself;
 * I_ARRAY_* -&gt; an Object[] of length THRESHOLD holding the elements in
 * its leading slots; I_OTHER -&gt; the delegate Set
 * (as assigned in initFromArray/checkShrinkFromOther).
 */
private Object m_oContents;
}
googleapis/google-cloud-java
35,214
java-os-config/proto-google-cloud-os-config-v1/src/main/java/com/google/cloud/osconfig/v1/UpdateOSPolicyAssignmentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/osconfig/v1/os_policy_assignments.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.osconfig.v1; /** * * * <pre> * A request message to update an OS policy assignment * </pre> * * Protobuf type {@code google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest} */ public final class UpdateOSPolicyAssignmentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest) UpdateOSPolicyAssignmentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateOSPolicyAssignmentRequest.newBuilder() to construct. 
private UpdateOSPolicyAssignmentRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateOSPolicyAssignmentRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateOSPolicyAssignmentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.osconfig.v1.OsPolicyAssignmentsProto .internal_static_google_cloud_osconfig_v1_UpdateOSPolicyAssignmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.osconfig.v1.OsPolicyAssignmentsProto .internal_static_google_cloud_osconfig_v1_UpdateOSPolicyAssignmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest.class, com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest.Builder.class); } private int bitField0_; public static final int OS_POLICY_ASSIGNMENT_FIELD_NUMBER = 1; private com.google.cloud.osconfig.v1.OSPolicyAssignment osPolicyAssignment_; /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the osPolicyAssignment field is set. */ @java.lang.Override public boolean hasOsPolicyAssignment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The osPolicyAssignment. */ @java.lang.Override public com.google.cloud.osconfig.v1.OSPolicyAssignment getOsPolicyAssignment() { return osPolicyAssignment_ == null ? 
com.google.cloud.osconfig.v1.OSPolicyAssignment.getDefaultInstance() : osPolicyAssignment_; } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.osconfig.v1.OSPolicyAssignmentOrBuilder getOsPolicyAssignmentOrBuilder() { return osPolicyAssignment_ == null ? com.google.cloud.osconfig.v1.OSPolicyAssignment.getDefaultInstance() : osPolicyAssignment_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getOsPolicyAssignment()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getOsPolicyAssignment()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest)) { return super.equals(obj); } com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest other = (com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest) obj; if (hasOsPolicyAssignment() != other.hasOsPolicyAssignment()) return false; if (hasOsPolicyAssignment()) { if (!getOsPolicyAssignment().equals(other.getOsPolicyAssignment())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasOsPolicyAssignment()) { hash = (37 * hash) + OS_POLICY_ASSIGNMENT_FIELD_NUMBER; hash = (53 * hash) + getOsPolicyAssignment().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( 
java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message to update an OS policy assignment * </pre> * * Protobuf type {@code google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest) com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.osconfig.v1.OsPolicyAssignmentsProto .internal_static_google_cloud_osconfig_v1_UpdateOSPolicyAssignmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.osconfig.v1.OsPolicyAssignmentsProto .internal_static_google_cloud_osconfig_v1_UpdateOSPolicyAssignmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest.class, com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest.Builder.class); } // Construct using com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getOsPolicyAssignmentFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; osPolicyAssignment_ = null; if (osPolicyAssignmentBuilder_ != null) { 
osPolicyAssignmentBuilder_.dispose(); osPolicyAssignmentBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.osconfig.v1.OsPolicyAssignmentsProto .internal_static_google_cloud_osconfig_v1_UpdateOSPolicyAssignmentRequest_descriptor; } @java.lang.Override public com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest getDefaultInstanceForType() { return com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest build() { com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest buildPartial() { com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest result = new com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.osPolicyAssignment_ = osPolicyAssignmentBuilder_ == null ? osPolicyAssignment_ : osPolicyAssignmentBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest) { return mergeFrom((com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest other) { if (other == com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest.getDefaultInstance()) return this; if (other.hasOsPolicyAssignment()) { mergeOsPolicyAssignment(other.getOsPolicyAssignment()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getOsPolicyAssignmentFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.osconfig.v1.OSPolicyAssignment osPolicyAssignment_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.osconfig.v1.OSPolicyAssignment, com.google.cloud.osconfig.v1.OSPolicyAssignment.Builder, com.google.cloud.osconfig.v1.OSPolicyAssignmentOrBuilder> osPolicyAssignmentBuilder_; /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the osPolicyAssignment field is set. */ public boolean hasOsPolicyAssignment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The osPolicyAssignment. */ public com.google.cloud.osconfig.v1.OSPolicyAssignment getOsPolicyAssignment() { if (osPolicyAssignmentBuilder_ == null) { return osPolicyAssignment_ == null ? 
com.google.cloud.osconfig.v1.OSPolicyAssignment.getDefaultInstance() : osPolicyAssignment_; } else { return osPolicyAssignmentBuilder_.getMessage(); } } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setOsPolicyAssignment(com.google.cloud.osconfig.v1.OSPolicyAssignment value) { if (osPolicyAssignmentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } osPolicyAssignment_ = value; } else { osPolicyAssignmentBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setOsPolicyAssignment( com.google.cloud.osconfig.v1.OSPolicyAssignment.Builder builderForValue) { if (osPolicyAssignmentBuilder_ == null) { osPolicyAssignment_ = builderForValue.build(); } else { osPolicyAssignmentBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated OS policy assignment. 
* </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeOsPolicyAssignment(com.google.cloud.osconfig.v1.OSPolicyAssignment value) { if (osPolicyAssignmentBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && osPolicyAssignment_ != null && osPolicyAssignment_ != com.google.cloud.osconfig.v1.OSPolicyAssignment.getDefaultInstance()) { getOsPolicyAssignmentBuilder().mergeFrom(value); } else { osPolicyAssignment_ = value; } } else { osPolicyAssignmentBuilder_.mergeFrom(value); } if (osPolicyAssignment_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearOsPolicyAssignment() { bitField0_ = (bitField0_ & ~0x00000001); osPolicyAssignment_ = null; if (osPolicyAssignmentBuilder_ != null) { osPolicyAssignmentBuilder_.dispose(); osPolicyAssignmentBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.osconfig.v1.OSPolicyAssignment.Builder getOsPolicyAssignmentBuilder() { bitField0_ |= 0x00000001; onChanged(); return getOsPolicyAssignmentFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The updated OS policy assignment. 
* </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.osconfig.v1.OSPolicyAssignmentOrBuilder getOsPolicyAssignmentOrBuilder() { if (osPolicyAssignmentBuilder_ != null) { return osPolicyAssignmentBuilder_.getMessageOrBuilder(); } else { return osPolicyAssignment_ == null ? com.google.cloud.osconfig.v1.OSPolicyAssignment.getDefaultInstance() : osPolicyAssignment_; } } /** * * * <pre> * Required. The updated OS policy assignment. * </pre> * * <code> * .google.cloud.osconfig.v1.OSPolicyAssignment os_policy_assignment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.osconfig.v1.OSPolicyAssignment, com.google.cloud.osconfig.v1.OSPolicyAssignment.Builder, com.google.cloud.osconfig.v1.OSPolicyAssignmentOrBuilder> getOsPolicyAssignmentFieldBuilder() { if (osPolicyAssignmentBuilder_ == null) { osPolicyAssignmentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.osconfig.v1.OSPolicyAssignment, com.google.cloud.osconfig.v1.OSPolicyAssignment.Builder, com.google.cloud.osconfig.v1.OSPolicyAssignmentOrBuilder>( getOsPolicyAssignment(), getParentForChildren(), isClean()); osPolicyAssignment_ = null; } return osPolicyAssignmentBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. 
Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Field mask that controls which fields of the assignment should be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest) private static final com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest(); } public static com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateOSPolicyAssignmentRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateOSPolicyAssignmentRequest>() { @java.lang.Override public UpdateOSPolicyAssignmentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateOSPolicyAssignmentRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateOSPolicyAssignmentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.osconfig.v1.UpdateOSPolicyAssignmentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
35,056
jaxws/src/share/jaxws_classes/com/sun/xml/internal/xsom/impl/util/SchemaTreeTraverser.java
/* * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.sun.xml.internal.xsom.impl.util; import com.sun.xml.internal.xsom.XSAnnotation; import com.sun.xml.internal.xsom.XSAttGroupDecl; import com.sun.xml.internal.xsom.XSAttributeDecl; import com.sun.xml.internal.xsom.XSAttributeUse; import com.sun.xml.internal.xsom.XSComplexType; import com.sun.xml.internal.xsom.XSContentType; import com.sun.xml.internal.xsom.XSElementDecl; import com.sun.xml.internal.xsom.XSFacet; import com.sun.xml.internal.xsom.XSIdentityConstraint; import com.sun.xml.internal.xsom.XSListSimpleType; import com.sun.xml.internal.xsom.XSModelGroup; import com.sun.xml.internal.xsom.XSModelGroupDecl; import com.sun.xml.internal.xsom.XSNotation; import com.sun.xml.internal.xsom.XSParticle; import com.sun.xml.internal.xsom.XSRestrictionSimpleType; import com.sun.xml.internal.xsom.XSSchema; import com.sun.xml.internal.xsom.XSSchemaSet; import com.sun.xml.internal.xsom.XSSimpleType; import com.sun.xml.internal.xsom.XSType; import com.sun.xml.internal.xsom.XSUnionSimpleType; import com.sun.xml.internal.xsom.XSWildcard; import com.sun.xml.internal.xsom.XSXPath; import com.sun.xml.internal.xsom.impl.Const; import com.sun.xml.internal.xsom.visitor.XSSimpleTypeVisitor; import com.sun.xml.internal.xsom.visitor.XSTermVisitor; import com.sun.xml.internal.xsom.visitor.XSVisitor; import org.xml.sax.Locator; import javax.swing.Box; import javax.swing.Icon; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JTree; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.TreeCellRenderer; import java.awt.Color; import java.awt.Component; import java.awt.FlowLayout; import java.awt.Font; import java.awt.Graphics; import java.math.BigInteger; import java.text.MessageFormat; import java.util.Iterator; /** * Generates approximated tree model for XML from a schema component. This is * not intended to be a fully-fledged round-trippable tree model. 
* * <h2>Usage of this class</h2> * * <ol> <li>Create a new instance.</li> <li>Call {@link * #visit(com.sun.xml.internal.xsom.XSSchemaSet)} function on your schema set.>/li> * <li>Retrieve the model using {@link #getModel()}. </li></ol> * * Every node in the resulting tree is a {@link SchemaTreeTraverser.SchemaTreeNode}, * and the model itself is {@link SchemaTreeTraverser.SchemaTreeModel}. You can * use {@link SchemaTreeTraverser.SchemaTreeCellRenderer} as a cell renderer for * your tree. * * @author Kirill Grouchnikov (kirillcool@yahoo.com) */ public class SchemaTreeTraverser implements XSVisitor, XSSimpleTypeVisitor { /** * The associated tree model. */ private SchemaTreeModel model; /** * The current node in the tree. */ private SchemaTreeNode currNode; /** * Tree model for schema hierarchy tree. * * @author Kirill Grouchnikov */ public static final class SchemaTreeModel extends DefaultTreeModel { /** * A simple constructor. Is made private to allow creating the root node * first. * * @param root The root node. */ private SchemaTreeModel(SchemaRootNode root) { super(root); } /** * A factory method for creating a new empty tree. * * @return New empty tree model. */ public static SchemaTreeModel getInstance() { SchemaRootNode root = new SchemaRootNode(); return new SchemaTreeModel(root); } public void addSchemaNode(SchemaTreeNode node) { ((SchemaRootNode) this.root).add(node); } } /** * The node of the schema hierarchy tree. * * @author Kirill Grouchnikov */ public static class SchemaTreeNode extends DefaultMutableTreeNode { /** * File name of the corresponding schema artifact. */ private String fileName; /** * Line number of the corresponding schema artifact. */ private int lineNumber; /** * The caption of the corresponding artifact. */ private String artifactName; /** * Simple constructor. * * @param artifactName Artifact name. * @param locator Artifact locator. 
*/ public SchemaTreeNode(String artifactName, Locator locator) { this.artifactName = artifactName; if (locator == null) { this.fileName = null; } else { String filename = locator.getSystemId(); filename = filename.replaceAll("\u002520", " "); // strip leading protocol if (filename.startsWith("file:/")) { filename = filename.substring(6); } this.fileName = filename; this.lineNumber = locator.getLineNumber() - 1; } } /** * Returns the caption for <code>this</code> node. * * @return The caption for <code>this</code> node. */ public String getCaption() { return this.artifactName; } /** * @return Returns the file name of the corresponding schema artifact. */ public String getFileName() { return fileName; } /** * @param fileName The file name of the corresponding schema artifact to * set. */ public void setFileName(String fileName) { this.fileName = fileName; } /** * @return Returns the line number of the corresponding schema * artifact. */ public int getLineNumber() { return lineNumber; } /** * @param lineNumber The line number of the corresponding schema * artifact to set. */ public void setLineNumber(int lineNumber) { this.lineNumber = lineNumber; } } /** * The root node of the schema hierarchy tree. * * @author Kirill Grouchnikov */ public static class SchemaRootNode extends SchemaTreeNode { /** * A simple constructor. */ public SchemaRootNode() { super("Schema set", null); } } /** * Sample cell renderer for the schema tree. * * @author Kirill Grouchnikov */ public static class SchemaTreeCellRenderer extends JPanel implements TreeCellRenderer { /** * The icon label. */ protected final JLabel iconLabel; /** * The text label */ protected final JLabel nameLabel; /** * The selection indicator. */ private boolean isSelected; /** * Background color for selected cells (light brown). */ public final Color selectedBackground = new Color(255, 244, 232); /** * Foreground color for selected cells, both text and border (dark * brown). 
*/ public final Color selectedForeground = new Color(64, 32, 0); /** * Default font for the text label. */ public final Font nameFont = new Font("Arial", Font.BOLD, 12); /** * Simple constructor. */ public SchemaTreeCellRenderer() { FlowLayout fl = new FlowLayout(FlowLayout.LEFT, 1, 1); this.setLayout(fl); this.iconLabel = new JLabel(); this.iconLabel.setOpaque(false); this.iconLabel.setBorder(null); this.add(this.iconLabel); // add some space this.add(Box.createHorizontalStrut(5)); this.nameLabel = new JLabel(); this.nameLabel.setOpaque(false); this.nameLabel.setBorder(null); this.nameLabel.setFont(nameFont); this.add(this.nameLabel); this.isSelected = false; this.setOpaque(false); this.setBorder(null); } /* * (non-Javadoc) * * @see javax.swing.JComponent#paintComponent(java.awt.Graphics) */ public final void paintComponent(Graphics g) { int width = this.getWidth(); int height = this.getHeight(); if (this.isSelected) { g.setColor(selectedBackground); g.fillRect(0, 0, width - 1, height - 1); g.setColor(selectedForeground); g.drawRect(0, 0, width - 1, height - 1); } super.paintComponent(g); } /** * Sets values for the icon and text of <code>this</code> renderer. * * @param icon Icon to show. * @param caption Text to show. * @param selected Selection indicator. If <code>true</code>, the * renderer will be shown with different background and * border settings. 
*/ protected final void setValues(Icon icon, String caption, boolean selected) { this.iconLabel.setIcon(icon); this.nameLabel.setText(caption); this.isSelected = selected; if (selected) { this.nameLabel.setForeground(selectedForeground); } else { this.nameLabel.setForeground(Color.black); } } /* (non-Javadoc) * @see javax.swing.tree.TreeCellRenderer#getTreeCellRendererComponent(javax.swing.JTree, java.lang.Object, boolean, boolean, boolean, int, boolean) */ public final Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) { if (value instanceof SchemaTreeNode) { SchemaTreeNode stn = (SchemaTreeNode) value; this.setValues(null, stn.getCaption(), selected); return this; } throw new IllegalStateException("Unknown node"); } } /** * Simple constructor. */ public SchemaTreeTraverser() { this.model = SchemaTreeModel.getInstance(); this.currNode = (SchemaTreeNode) this.model.getRoot(); } /** * Retrieves the tree model of <code>this</code> traverser. * * @return Tree model of <code>this</code> traverser. */ public SchemaTreeModel getModel() { return model; } /** * Visits the root schema set. * * @param s Root schema set. 
*/ public void visit(XSSchemaSet s) { for (XSSchema schema : s.getSchemas()) { schema(schema); } } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#schema(com.sun.xml.internal.xsom.XSSchema) */ public void schema(XSSchema s) { // QUICK HACK: don't print the built-in components if (s.getTargetNamespace().equals(Const.schemaNamespace)) { return; } SchemaTreeNode newNode = new SchemaTreeNode("Schema " + s.getLocator().getSystemId(), s.getLocator()); this.currNode = newNode; this.model.addSchemaNode(newNode); for (XSAttGroupDecl groupDecl : s.getAttGroupDecls().values()) { attGroupDecl(groupDecl); } for (XSAttributeDecl attrDecl : s.getAttributeDecls().values()) { attributeDecl(attrDecl); } for (XSComplexType complexType : s.getComplexTypes().values()) { complexType(complexType); } for (XSElementDecl elementDecl : s.getElementDecls().values()) { elementDecl(elementDecl); } for (XSModelGroupDecl modelGroupDecl : s.getModelGroupDecls().values()) { modelGroupDecl(modelGroupDecl); } for (XSSimpleType simpleType : s.getSimpleTypes().values()) { simpleType(simpleType); } } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#attGroupDecl(com.sun.xml.internal.xsom.XSAttGroupDecl) */ public void attGroupDecl(XSAttGroupDecl decl) { SchemaTreeNode newNode = new SchemaTreeNode("Attribute group \"" + decl.getName() + "\"", decl.getLocator()); this.currNode.add(newNode); this.currNode = newNode; Iterator itr; itr = decl.iterateAttGroups(); while (itr.hasNext()) { dumpRef((XSAttGroupDecl) itr.next()); } itr = decl.iterateDeclaredAttributeUses(); while (itr.hasNext()) { attributeUse((XSAttributeUse) itr.next()); } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /** * Creates node of attribute group decalration reference. * * @param decl Attribute group decalration reference. 
*/ public void dumpRef(XSAttGroupDecl decl) { SchemaTreeNode newNode = new SchemaTreeNode("Attribute group ref \"{" + decl.getTargetNamespace() + "}" + decl.getName() + "\"", decl .getLocator()); this.currNode.add(newNode); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#attributeUse(com.sun.xml.internal.xsom.XSAttributeUse) */ public void attributeUse(XSAttributeUse use) { XSAttributeDecl decl = use.getDecl(); String additionalAtts = ""; if (use.isRequired()) { additionalAtts += " use=\"required\""; } if (use.getFixedValue() != null && use.getDecl().getFixedValue() == null) { additionalAtts += " fixed=\"" + use.getFixedValue() + "\""; } if (use.getDefaultValue() != null && use.getDecl().getDefaultValue() == null) { additionalAtts += " default=\"" + use.getDefaultValue() + "\""; } if (decl.isLocal()) { // this is anonymous attribute use dump(decl, additionalAtts); } else { // reference to a global one String str = MessageFormat.format( "Attribute ref \"'{'{0}'}'{1}{2}\"", new Object[]{ decl.getTargetNamespace(), decl.getName(), additionalAtts}); SchemaTreeNode newNode = new SchemaTreeNode(str, decl.getLocator()); this.currNode.add(newNode); } } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#attributeDecl(com.sun.xml.internal.xsom.XSAttributeDecl) */ public void attributeDecl(XSAttributeDecl decl) { dump(decl, ""); } /** * Creates node for attribute declaration with additional attributes. * * @param decl Attribute declaration. * @param additionalAtts Additional attributes. */ private void dump(XSAttributeDecl decl, String additionalAtts) { XSSimpleType type = decl.getType(); String str = MessageFormat.format("Attribute \"{0}\"{1}{2}{3}{4}", new Object[]{ decl.getName(), additionalAtts, type.isLocal() ? "" : MessageFormat.format( " type=\"'{'{0}'}'{1}\"", new Object[]{ type.getTargetNamespace(), type.getName()}), decl.getFixedValue() == null ? "" : " fixed=\"" + decl.getFixedValue() + "\"", decl.getDefaultValue() == null ? 
"" : " default=\"" + decl.getDefaultValue() + "\""}); SchemaTreeNode newNode = new SchemaTreeNode(str, decl.getLocator()); this.currNode.add(newNode); this.currNode = newNode; if (type.isLocal()) { simpleType(type); } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSContentTypeVisitor#simpleType(com.sun.xml.internal.xsom.XSSimpleType) */ public void simpleType(XSSimpleType type) { String str = MessageFormat.format("Simple type {0}", new Object[]{type.isLocal() ? "" : " name=\"" + type.getName() + "\""}); SchemaTreeNode newNode = new SchemaTreeNode(str, type.getLocator()); this.currNode.add(newNode); this.currNode = newNode; type.visit((XSSimpleTypeVisitor) this); this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSSimpleTypeVisitor#listSimpleType(com.sun.xml.internal.xsom.XSListSimpleType) */ public void listSimpleType(XSListSimpleType type) { XSSimpleType itemType = type.getItemType(); if (itemType.isLocal()) { SchemaTreeNode newNode = new SchemaTreeNode("List", type .getLocator()); this.currNode.add(newNode); this.currNode = newNode; simpleType(itemType); this.currNode = (SchemaTreeNode) this.currNode.getParent(); } else { // global type String str = MessageFormat.format("List itemType=\"'{'{0}'}'{1}\"", new Object[]{itemType.getTargetNamespace(), itemType.getName()}); SchemaTreeNode newNode = new SchemaTreeNode(str, itemType .getLocator()); this.currNode.add(newNode); } } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSSimpleTypeVisitor#unionSimpleType(com.sun.xml.internal.xsom.XSUnionSimpleType) */ public void unionSimpleType(XSUnionSimpleType type) { final int len = type.getMemberSize(); StringBuffer ref = new StringBuffer(); for (int i = 0; i < len; i++) { XSSimpleType member = type.getMember(i); if (member.isGlobal()) { ref.append(MessageFormat.format(" '{'{0}'}'{1}", new Object[]{ 
member.getTargetNamespace(), member.getName()})); } } String name = (ref.length() == 0) ? "Union" : ("Union memberTypes=\"" + ref + "\""); SchemaTreeNode newNode = new SchemaTreeNode(name, type.getLocator()); this.currNode.add(newNode); this.currNode = newNode; for (int i = 0; i < len; i++) { XSSimpleType member = type.getMember(i); if (member.isLocal()) { simpleType(member); } } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSSimpleTypeVisitor#restrictionSimpleType(com.sun.xml.internal.xsom.XSRestrictionSimpleType) */ public void restrictionSimpleType(XSRestrictionSimpleType type) { if (type.getBaseType() == null) { // don't print anySimpleType if (!type.getName().equals("anySimpleType")) { throw new InternalError(); } if (!Const.schemaNamespace.equals(type.getTargetNamespace())) { throw new InternalError(); } return; } XSSimpleType baseType = type.getSimpleBaseType(); String str = MessageFormat.format("Restriction {0}", new Object[]{baseType.isLocal() ? 
"" : " base=\"{" + baseType.getTargetNamespace() + "}" + baseType.getName() + "\""}); SchemaTreeNode newNode = new SchemaTreeNode(str, baseType.getLocator()); this.currNode.add(newNode); this.currNode = newNode; if (baseType.isLocal()) { simpleType(baseType); } Iterator itr = type.iterateDeclaredFacets(); while (itr.hasNext()) { facet((XSFacet) itr.next()); } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#facet(com.sun.xml.internal.xsom.XSFacet) */ public void facet(XSFacet facet) { SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat.format( "{0} value=\"{1}\"", new Object[]{facet.getName(), facet.getValue(), }), facet.getLocator()); this.currNode.add(newNode); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#notation(com.sun.xml.internal.xsom.XSNotation) */ public void notation(XSNotation notation) { SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat.format( "Notation name='\"0}\" public =\"{1}\" system=\"{2}\"", new Object[]{notation.getName(), notation.getPublicId(), notation.getSystemId()}), notation.getLocator()); this.currNode.add(newNode); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#complexType(com.sun.xml.internal.xsom.XSComplexType) */ public void complexType(XSComplexType type) { SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat.format( "ComplexType {0}", new Object[]{type.isLocal() ? 
"" : " name=\"" + type.getName() + "\""}), type .getLocator()); this.currNode.add(newNode); this.currNode = newNode; // TODO: wildcard if (type.getContentType().asSimpleType() != null) { // simple content SchemaTreeNode newNode2 = new SchemaTreeNode("Simple content", type .getContentType().getLocator()); this.currNode.add(newNode2); this.currNode = newNode2; XSType baseType = type.getBaseType(); if (type.getDerivationMethod() == XSType.RESTRICTION) { // restriction String str = MessageFormat.format( "Restriction base=\"<{0}>{1}\"", new Object[]{ baseType.getTargetNamespace(), baseType.getName()}); SchemaTreeNode newNode3 = new SchemaTreeNode(str, baseType .getLocator()); this.currNode.add(newNode3); this.currNode = newNode3; dumpComplexTypeAttribute(type); this.currNode = (SchemaTreeNode) this.currNode.getParent(); } else { // extension String str = MessageFormat.format( "Extension base=\"<{0}>{1}\"", new Object[]{ baseType.getTargetNamespace(), baseType.getName()}); SchemaTreeNode newNode3 = new SchemaTreeNode(str, baseType .getLocator()); this.currNode.add(newNode3); this.currNode = newNode3; // check if have redefine tag if ((type.getTargetNamespace().compareTo( baseType.getTargetNamespace()) == 0) && (type.getName().compareTo(baseType.getName()) == 0)) { SchemaTreeNode newNodeRedefine = new SchemaTreeNode( "redefine", type .getLocator()); this.currNode.add(newNodeRedefine); this.currNode = newNodeRedefine; baseType.visit(this); this.currNode = (SchemaTreeNode) newNodeRedefine.getParent(); } dumpComplexTypeAttribute(type); this.currNode = (SchemaTreeNode) this.currNode.getParent(); } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } else { // complex content SchemaTreeNode newNode2 = new SchemaTreeNode("Complex content", type.getContentType().getLocator()); this.currNode.add(newNode2); this.currNode = newNode2; XSComplexType baseType = type.getBaseType().asComplexType(); if (type.getDerivationMethod() == XSType.RESTRICTION) { // restriction String 
str = MessageFormat.format( "Restriction base=\"<{0}>{1}\"", new Object[]{ baseType.getTargetNamespace(), baseType.getName()}); SchemaTreeNode newNode3 = new SchemaTreeNode(str, baseType.getLocator()); this.currNode.add(newNode3); this.currNode = newNode3; type.getContentType().visit(this); dumpComplexTypeAttribute(type); this.currNode = (SchemaTreeNode) this.currNode.getParent(); } else { // extension String str = MessageFormat.format( "Extension base=\"'{'{0}'}'{1}\"", new Object[]{ baseType.getTargetNamespace(), baseType.getName()}); SchemaTreeNode newNode3 = new SchemaTreeNode(str, baseType.getLocator()); this.currNode.add(newNode3); this.currNode = newNode3; // check if have redefine tag if ((type.getTargetNamespace().compareTo( baseType.getTargetNamespace()) == 0) && (type.getName().compareTo(baseType.getName()) == 0)) { SchemaTreeNode newNodeRedefine = new SchemaTreeNode( "redefine", type .getLocator()); this.currNode.add(newNodeRedefine); this.currNode = newNodeRedefine; baseType.visit(this); this.currNode = (SchemaTreeNode) newNodeRedefine.getParent(); } type.getExplicitContent().visit(this); dumpComplexTypeAttribute(type); this.currNode = (SchemaTreeNode) this.currNode.getParent(); } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /** * Creates node for complex type. * * @param type Complex type. */ private void dumpComplexTypeAttribute(XSComplexType type) { Iterator itr; itr = type.iterateAttGroups(); while (itr.hasNext()) { dumpRef((XSAttGroupDecl) itr.next()); } itr = type.iterateDeclaredAttributeUses(); while (itr.hasNext()) { attributeUse((XSAttributeUse) itr.next()); } } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSTermVisitor#elementDecl(com.sun.xml.internal.xsom.XSElementDecl) */ public void elementDecl(XSElementDecl decl) { elementDecl(decl, ""); } /** * Creates node for element declaration with additional attributes. * * @param decl Element declaration. 
* @param extraAtts Additional attributes. */ private void elementDecl(XSElementDecl decl, String extraAtts) { XSType type = decl.getType(); // TODO: various other attributes String str = MessageFormat.format("Element name=\"{0}\"{1}{2}", new Object[]{ decl.getName(), type.isLocal() ? "" : " type=\"{" + type.getTargetNamespace() + "}" + type.getName() + "\"", extraAtts}); SchemaTreeNode newNode = new SchemaTreeNode(str, decl.getLocator()); this.currNode.add(newNode); this.currNode = newNode; if (type.isLocal()) { if (type.isLocal()) { type.visit(this); } } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSTermVisitor#modelGroupDecl(com.sun.xml.internal.xsom.XSModelGroupDecl) */ public void modelGroupDecl(XSModelGroupDecl decl) { SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat.format( "Group name=\"{0}\"", new Object[]{decl.getName()}), decl.getLocator()); this.currNode.add(newNode); this.currNode = newNode; modelGroup(decl.getModelGroup()); this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSTermVisitor#modelGroup(com.sun.xml.internal.xsom.XSModelGroup) */ public void modelGroup(XSModelGroup group) { modelGroup(group, ""); } /** * Creates node for model group with additional attributes. * * @param group Model group. * @param extraAtts Additional attributes. 
*/ private void modelGroup(XSModelGroup group, String extraAtts) { SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat.format( "{0}{1}", new Object[]{group.getCompositor(), extraAtts}), group.getLocator()); this.currNode.add(newNode); this.currNode = newNode; final int len = group.getSize(); for (int i = 0; i < len; i++) { particle(group.getChild(i)); } this.currNode = (SchemaTreeNode) this.currNode.getParent(); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSContentTypeVisitor#particle(com.sun.xml.internal.xsom.XSParticle) */ public void particle(XSParticle part) { BigInteger i; StringBuffer buf = new StringBuffer(); i = part.getMaxOccurs(); if (i.equals(BigInteger.valueOf(XSParticle.UNBOUNDED))) { buf.append(" maxOccurs=\"unbounded\""); } else { if (!i.equals(BigInteger.ONE)) { buf.append(" maxOccurs=\"" + i + "\""); } } i = part.getMinOccurs(); if (!i.equals(BigInteger.ONE)) { buf.append(" minOccurs=\"" + i + "\""); } final String extraAtts = buf.toString(); part.getTerm().visit(new XSTermVisitor() { public void elementDecl(XSElementDecl decl) { if (decl.isLocal()) { SchemaTreeTraverser.this.elementDecl(decl, extraAtts); } else { // reference SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat .format("Element ref=\"'{'{0}'}'{1}\"{2}", new Object[]{decl.getTargetNamespace(), decl.getName(), extraAtts}), decl.getLocator()); currNode.add(newNode); } } public void modelGroupDecl(XSModelGroupDecl decl) { // reference SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat .format("Group ref=\"'{'{0}'}'{1}\"{2}", new Object[]{ decl.getTargetNamespace(), decl.getName(), extraAtts}), decl.getLocator()); currNode.add(newNode); } public void modelGroup(XSModelGroup group) { SchemaTreeTraverser.this.modelGroup(group, extraAtts); } public void wildcard(XSWildcard wc) { SchemaTreeTraverser.this.wildcard(wc, extraAtts); } }); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSTermVisitor#wildcard(com.sun.xml.internal.xsom.XSWildcard) */ 
public void wildcard(XSWildcard wc) { wildcard(wc, ""); } /** * Creates node for wild card with additional attributes. * * @param wc Wild card. * @param extraAtts Additional attributes. */ private void wildcard(XSWildcard wc, String extraAtts) { // TODO SchemaTreeNode newNode = new SchemaTreeNode(MessageFormat.format( "Any ", new Object[]{extraAtts}), wc.getLocator()); currNode.add(newNode); } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#annotation(com.sun.xml.internal.xsom.XSAnnotation) */ public void annotation(XSAnnotation ann) { // TODO: it would be nice even if we just put <xs:documentation> } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSContentTypeVisitor#empty(com.sun.xml.internal.xsom.XSContentType) */ public void empty(XSContentType t) { } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#identityConstraint(com.sun.xml.internal.xsom.XSIdentityConstraint) */ public void identityConstraint(XSIdentityConstraint ic) { } /* (non-Javadoc) * @see com.sun.xml.internal.xsom.visitor.XSVisitor#xpath(com.sun.xml.internal.xsom.XSXPath) */ public void xpath(XSXPath xp) { } }
googleapis/google-cloud-java
35,383
java-scheduler/google-cloud-scheduler/src/main/java/com/google/cloud/scheduler/v1beta1/stub/HttpJsonCloudSchedulerStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.scheduler.v1beta1.stub; import static com.google.cloud.scheduler.v1beta1.CloudSchedulerClient.ListJobsPagedResponse; import static com.google.cloud.scheduler.v1beta1.CloudSchedulerClient.ListLocationsPagedResponse; import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.cloud.scheduler.v1beta1.CreateJobRequest; import com.google.cloud.scheduler.v1beta1.DeleteJobRequest; import com.google.cloud.scheduler.v1beta1.GetJobRequest; import com.google.cloud.scheduler.v1beta1.Job; import com.google.cloud.scheduler.v1beta1.ListJobsRequest; 
import com.google.cloud.scheduler.v1beta1.ListJobsResponse;
import com.google.cloud.scheduler.v1beta1.PauseJobRequest;
import com.google.cloud.scheduler.v1beta1.ResumeJobRequest;
import com.google.cloud.scheduler.v1beta1.RunJobRequest;
import com.google.cloud.scheduler.v1beta1.UpdateJobRequest;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * REST stub implementation for the CloudScheduler service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@BetaApi
@Generated("by gapic-generator-java")
public class HttpJsonCloudSchedulerStub extends CloudSchedulerStub {
  // Registry used by every response parser below for resolving message types during
  // JSON decoding; built empty here, so no additional Any-packed types are registered.
  private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().build();

  // Each ApiMethodDescriptor below statically describes one RPC as a REST call:
  // HTTP verb, URL template, how to split a request proto into path/query/body,
  // and how to parse the JSON response back into a proto. All descriptors append
  // the "$alt=json;enum-encoding=int" query param so enums are wired as ints.

  // ListJobs: GET /v1beta1/{parent=projects/*/locations/*}/jobs with paging params.
  private static final ApiMethodDescriptor<ListJobsRequest, ListJobsResponse>
      listJobsMethodDescriptor =
          ApiMethodDescriptor.<ListJobsRequest, ListJobsResponse>newBuilder()
              .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/ListJobs")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListJobsRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{parent=projects/*/locations/*}/jobs",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListJobsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListJobsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "filter", request.getFilter());
                            serializer.putQueryParam(
                                fields, "legacyAppEngineCron", request.getLegacyAppEngineCron());
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // GET request: no HTTP body.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListJobsResponse>newBuilder()
                      .setDefaultInstance(ListJobsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // GetJob: GET /v1beta1/{name=projects/*/locations/*/jobs/*}; no body, no extra query params.
  private static final ApiMethodDescriptor<GetJobRequest, Job> getJobMethodDescriptor =
      ApiMethodDescriptor.<GetJobRequest, Job>newBuilder()
          .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/GetJob")
          .setHttpMethod("GET")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<GetJobRequest>newBuilder()
                  .setPath(
                      "/v1beta1/{name=projects/*/locations/*/jobs/*}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<GetJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<GetJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(request -> null)
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Job>newBuilder()
                  .setDefaultInstance(Job.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // CreateJob: POST /v1beta1/{parent=projects/*/locations/*}/jobs; the Job message is the body.
  private static final ApiMethodDescriptor<CreateJobRequest, Job> createJobMethodDescriptor =
      ApiMethodDescriptor.<CreateJobRequest, Job>newBuilder()
          .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/CreateJob")
          .setHttpMethod("POST")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<CreateJobRequest>newBuilder()
                  .setPath(
                      "/v1beta1/{parent=projects/*/locations/*}/jobs",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<CreateJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "parent", request.getParent());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<CreateJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  // Serialize only the nested Job field as the JSON body.
                  .setRequestBodyExtractor(
                      request -> ProtoRestSerializer.create().toBody("job", request.getJob(), true))
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Job>newBuilder()
                  .setDefaultInstance(Job.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // UpdateJob: PATCH keyed by the resource name embedded in the Job ("job.name");
  // updateMask travels as a query param and the Job message as the body.
  private static final ApiMethodDescriptor<UpdateJobRequest, Job> updateJobMethodDescriptor =
      ApiMethodDescriptor.<UpdateJobRequest, Job>newBuilder()
          .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/UpdateJob")
          .setHttpMethod("PATCH")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<UpdateJobRequest>newBuilder()
                  .setPath(
                      "/v1beta1/{job.name=projects/*/locations/*/jobs/*}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<UpdateJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "job.name", request.getJob().getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<UpdateJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(
                      request -> ProtoRestSerializer.create().toBody("job", request.getJob(), true))
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Job>newBuilder()
                  .setDefaultInstance(Job.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // DeleteJob: DELETE on the job name; returns google.protobuf.Empty.
  private static final ApiMethodDescriptor<DeleteJobRequest, Empty> deleteJobMethodDescriptor =
      ApiMethodDescriptor.<DeleteJobRequest, Empty>newBuilder()
          .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/DeleteJob")
          .setHttpMethod("DELETE")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<DeleteJobRequest>newBuilder()
                  .setPath(
                      "/v1beta1/{name=projects/*/locations/*/jobs/*}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<DeleteJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<DeleteJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(
                            fields, "legacyAppEngineCron", request.getLegacyAppEngineCron());
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(request -> null)
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Empty>newBuilder()
                  .setDefaultInstance(Empty.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // PauseJob: POST custom verb ":pause". The body is the request minus its "name"
  // field (cleared because "name" is already bound into the URL path).
  private static final ApiMethodDescriptor<PauseJobRequest, Job> pauseJobMethodDescriptor =
      ApiMethodDescriptor.<PauseJobRequest, Job>newBuilder()
          .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/PauseJob")
          .setHttpMethod("POST")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<PauseJobRequest>newBuilder()
                  .setPath(
                      "/v1beta1/{name=projects/*/locations/*/jobs/*}:pause",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<PauseJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<PauseJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(
                      request ->
                          ProtoRestSerializer.create()
                              .toBody("*", request.toBuilder().clearName().build(), true))
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Job>newBuilder()
                  .setDefaultInstance(Job.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // ResumeJob: POST custom verb ":resume"; same body convention as PauseJob.
  private static final ApiMethodDescriptor<ResumeJobRequest, Job> resumeJobMethodDescriptor =
      ApiMethodDescriptor.<ResumeJobRequest, Job>newBuilder()
          .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/ResumeJob")
          .setHttpMethod("POST")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<ResumeJobRequest>newBuilder()
                  .setPath(
                      "/v1beta1/{name=projects/*/locations/*/jobs/*}:resume",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<ResumeJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<ResumeJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(
                      request ->
                          ProtoRestSerializer.create()
                              .toBody("*", request.toBuilder().clearName().build(), true))
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Job>newBuilder()
                  .setDefaultInstance(Job.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // RunJob: POST custom verb ":run"; same body convention as PauseJob.
  private static final ApiMethodDescriptor<RunJobRequest, Job> runJobMethodDescriptor =
      ApiMethodDescriptor.<RunJobRequest, Job>newBuilder()
          .setFullMethodName("google.cloud.scheduler.v1beta1.CloudScheduler/RunJob")
          .setHttpMethod("POST")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<RunJobRequest>newBuilder()
                  .setPath(
                      "/v1beta1/{name=projects/*/locations/*/jobs/*}:run",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<RunJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<RunJobRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(
                      request ->
                          ProtoRestSerializer.create()
                              .toBody("*", request.toBuilder().clearName().build(), true))
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Job>newBuilder()
                  .setDefaultInstance(Job.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // ListLocations (mixin service google.cloud.location.Locations): GET over the project.
  private static final ApiMethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          ApiMethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListLocationsRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{name=projects/*}/locations",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListLocationsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListLocationsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListLocationsResponse>newBuilder()
                      .setDefaultInstance(ListLocationsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // GetLocation (mixin service google.cloud.location.Locations): GET a single location.
  private static final ApiMethodDescriptor<GetLocationRequest, Location>
      getLocationMethodDescriptor =
          ApiMethodDescriptor.<GetLocationRequest, Location>newBuilder()
              .setFullMethodName("google.cloud.location.Locations/GetLocation")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetLocationRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{name=projects/*/locations/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetLocationRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetLocationRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Location>newBuilder()
                      .setDefaultInstance(Location.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // One ready-to-call UnaryCallable per RPC, built in the constructor from the
  // descriptors above plus the retry/timeout settings supplied by the caller.
  private final UnaryCallable<ListJobsRequest, ListJobsResponse> listJobsCallable;
  private final UnaryCallable<ListJobsRequest, ListJobsPagedResponse> listJobsPagedCallable;
  private final UnaryCallable<GetJobRequest, Job> getJobCallable;
  private final UnaryCallable<CreateJobRequest, Job> createJobCallable;
  private final UnaryCallable<UpdateJobRequest, Job> updateJobCallable;
  private final UnaryCallable<DeleteJobRequest, Empty> deleteJobCallable;
  private final UnaryCallable<PauseJobRequest, Job> pauseJobCallable;
  private final UnaryCallable<ResumeJobRequest, Job> resumeJobCallable;
  private final UnaryCallable<RunJobRequest, Job> runJobCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;

  // Aggregates every closeable resource owned by this stub; see close()/shutdown().
  private final BackgroundResource backgroundResources;
  private final HttpJsonStubCallableFactory callableFactory;

  // Static factories are the preferred way to obtain a stub instance.
  public static final HttpJsonCloudSchedulerStub create(CloudSchedulerStubSettings settings)
      throws IOException {
    return new HttpJsonCloudSchedulerStub(settings, ClientContext.create(settings));
  }

  public static final HttpJsonCloudSchedulerStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonCloudSchedulerStub(
        CloudSchedulerStubSettings.newHttpJsonBuilder().build(), clientContext);
  }

  public static final HttpJsonCloudSchedulerStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonCloudSchedulerStub(
        CloudSchedulerStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of HttpJsonCloudSchedulerStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonCloudSchedulerStub(
      CloudSchedulerStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new HttpJsonCloudSchedulerCallableFactory());
  }

  /**
   * Constructs an instance of HttpJsonCloudSchedulerStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonCloudSchedulerStub(
      CloudSchedulerStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;

    // Per-RPC transport settings: each one binds a method descriptor to the shared
    // type registry and a params extractor. The extracted key/value pairs
    // (e.g. "parent", "name", "job.name") mirror the RPC's URL-bound fields —
    // NOTE(review): presumably used for request routing headers; confirm in gax docs.
    HttpJsonCallSettings<ListJobsRequest, ListJobsResponse> listJobsTransportSettings =
        HttpJsonCallSettings.<ListJobsRequest, ListJobsResponse>newBuilder()
            .setMethodDescriptor(listJobsMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<GetJobRequest, Job> getJobTransportSettings =
        HttpJsonCallSettings.<GetJobRequest, Job>newBuilder()
            .setMethodDescriptor(getJobMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<CreateJobRequest, Job> createJobTransportSettings =
        HttpJsonCallSettings.<CreateJobRequest, Job>newBuilder()
            .setMethodDescriptor(createJobMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<UpdateJobRequest, Job> updateJobTransportSettings =
        HttpJsonCallSettings.<UpdateJobRequest, Job>newBuilder()
            .setMethodDescriptor(updateJobMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("job.name", String.valueOf(request.getJob().getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<DeleteJobRequest, Empty> deleteJobTransportSettings =
        HttpJsonCallSettings.<DeleteJobRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteJobMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<PauseJobRequest, Job> pauseJobTransportSettings =
        HttpJsonCallSettings.<PauseJobRequest, Job>newBuilder()
            .setMethodDescriptor(pauseJobMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ResumeJobRequest, Job> resumeJobTransportSettings =
        HttpJsonCallSettings.<ResumeJobRequest, Job>newBuilder()
            .setMethodDescriptor(resumeJobMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<RunJobRequest, Job> runJobTransportSettings =
        HttpJsonCallSettings.<RunJobRequest, Job>newBuilder()
            .setMethodDescriptor(runJobMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListLocationsRequest, ListLocationsResponse>
        listLocationsTransportSettings =
            HttpJsonCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
                .setMethodDescriptor(listLocationsMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        HttpJsonCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();

    // Wire each transport setting to its user-configurable call settings. Paged
    // variants (listJobs, listLocations) additionally get a paged callable that
    // shares the same transport settings as the unary one.
    this.listJobsCallable =
        callableFactory.createUnaryCallable(
            listJobsTransportSettings, settings.listJobsSettings(), clientContext);
    this.listJobsPagedCallable =
        callableFactory.createPagedCallable(
            listJobsTransportSettings, settings.listJobsSettings(), clientContext);
    this.getJobCallable =
        callableFactory.createUnaryCallable(
            getJobTransportSettings, settings.getJobSettings(), clientContext);
    this.createJobCallable =
        callableFactory.createUnaryCallable(
            createJobTransportSettings, settings.createJobSettings(), clientContext);
    this.updateJobCallable =
        callableFactory.createUnaryCallable(
            updateJobTransportSettings, settings.updateJobSettings(), clientContext);
    this.deleteJobCallable =
        callableFactory.createUnaryCallable(
            deleteJobTransportSettings, settings.deleteJobSettings(), clientContext);
    this.pauseJobCallable =
        callableFactory.createUnaryCallable(
            pauseJobTransportSettings, settings.pauseJobSettings(), clientContext);
    this.resumeJobCallable =
        callableFactory.createUnaryCallable(
            resumeJobTransportSettings, settings.resumeJobSettings(), clientContext);
    this.runJobCallable =
        callableFactory.createUnaryCallable(
            runJobTransportSettings, settings.runJobSettings(), clientContext);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  // Exposes the full descriptor list, e.g. for test servers and discovery tooling.
  @InternalApi
  public static List<ApiMethodDescriptor> getMethodDescriptors() {
    List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
    methodDescriptors.add(listJobsMethodDescriptor);
    methodDescriptors.add(getJobMethodDescriptor);
    methodDescriptors.add(createJobMethodDescriptor);
    methodDescriptors.add(updateJobMethodDescriptor);
    methodDescriptors.add(deleteJobMethodDescriptor);
    methodDescriptors.add(pauseJobMethodDescriptor);
    methodDescriptors.add(resumeJobMethodDescriptor);
    methodDescriptors.add(runJobMethodDescriptor);
    methodDescriptors.add(listLocationsMethodDescriptor);
    methodDescriptors.add(getLocationMethodDescriptor);
    return methodDescriptors;
  }

  // Simple accessors returning the callables built in the constructor.
  @Override
  public UnaryCallable<ListJobsRequest, ListJobsResponse> listJobsCallable() {
    return listJobsCallable;
  }

  @Override
  public UnaryCallable<ListJobsRequest, ListJobsPagedResponse> listJobsPagedCallable() {
    return listJobsPagedCallable;
  }

  @Override
  public UnaryCallable<GetJobRequest, Job> getJobCallable() {
    return getJobCallable;
  }

  @Override
  public UnaryCallable<CreateJobRequest, Job> createJobCallable() {
    return createJobCallable;
  }

  @Override
  public UnaryCallable<UpdateJobRequest, Job> updateJobCallable() {
    return updateJobCallable;
  }

  @Override
  public UnaryCallable<DeleteJobRequest, Empty> deleteJobCallable() {
    return deleteJobCallable;
  }

  @Override
  public UnaryCallable<PauseJobRequest, Job> pauseJobCallable() {
    return pauseJobCallable;
  }

  @Override
  public UnaryCallable<ResumeJobRequest, Job> resumeJobCallable() {
    return resumeJobCallable;
  }

  @Override
  public UnaryCallable<RunJobRequest, Job> runJobCallable() {
    return runJobCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }

  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }

  // Releases all background resources; checked exceptions are wrapped because
  // close() itself declares none.
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  // Lifecycle methods simply delegate to the aggregated background resources.
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
googleapis/google-cloud-java
35,193
java-orchestration-airflow/proto-google-cloud-orchestration-airflow-v1/src/main/java/com/google/cloud/orchestration/airflow/service/v1/CreateUserWorkloadsSecretRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/orchestration/airflow/service/v1/environments.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.orchestration.airflow.service.v1; /** * * * <pre> * Create user workloads Secret request. * </pre> * * Protobuf type {@code * google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest} */ public final class CreateUserWorkloadsSecretRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest) CreateUserWorkloadsSecretRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateUserWorkloadsSecretRequest.newBuilder() to construct. 
private CreateUserWorkloadsSecretRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateUserWorkloadsSecretRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateUserWorkloadsSecretRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest .class, com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest .Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int USER_WORKLOADS_SECRET_FIELD_NUMBER = 2; private com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret userWorkloadsSecret_; /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userWorkloadsSecret field is set. */ @java.lang.Override public boolean hasUserWorkloadsSecret() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userWorkloadsSecret. */ @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret getUserWorkloadsSecret() { return userWorkloadsSecret_ == null ? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret.getDefaultInstance() : userWorkloadsSecret_; } /** * * * <pre> * Required. User workloads Secret to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecretOrBuilder getUserWorkloadsSecretOrBuilder() { return userWorkloadsSecret_ == null ? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret.getDefaultInstance() : userWorkloadsSecret_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUserWorkloadsSecret()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUserWorkloadsSecret()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest)) { return super.equals(obj); } com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest other = 
(com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasUserWorkloadsSecret() != other.hasUserWorkloadsSecret()) return false; if (hasUserWorkloadsSecret()) { if (!getUserWorkloadsSecret().equals(other.getUserWorkloadsSecret())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasUserWorkloadsSecret()) { hash = (37 * hash) + USER_WORKLOADS_SECRET_FIELD_NUMBER; hash = (53 * hash) + getUserWorkloadsSecret().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Create user workloads Secret request. 
* </pre> * * Protobuf type {@code * google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest) com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest .class, com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest .Builder.class); } // Construct using // com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUserWorkloadsSecretFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; userWorkloadsSecret_ = null; if (userWorkloadsSecretBuilder_ != null) { userWorkloadsSecretBuilder_.dispose(); userWorkloadsSecretBuilder_ = null; } return this; } 
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsSecretRequest_descriptor; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest getDefaultInstanceForType() { return com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest build() { com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest buildPartial() { com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest result = new com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest( this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.userWorkloadsSecret_ = userWorkloadsSecretBuilder_ == null ? 
userWorkloadsSecret_ : userWorkloadsSecretBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest) { return mergeFrom( (com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest other) { if (other == com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasUserWorkloadsSecret()) { mergeUserWorkloadsSecret(other.getUserWorkloadsSecret()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public 
Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getUserWorkloadsSecretFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret userWorkloadsSecret_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret, com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret.Builder, com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecretOrBuilder> userWorkloadsSecretBuilder_; /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userWorkloadsSecret field is set. */ public boolean hasUserWorkloadsSecret() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userWorkloadsSecret. */ public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret getUserWorkloadsSecret() { if (userWorkloadsSecretBuilder_ == null) { return userWorkloadsSecret_ == null ? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret .getDefaultInstance() : userWorkloadsSecret_; } else { return userWorkloadsSecretBuilder_.getMessage(); } } /** * * * <pre> * Required. User workloads Secret to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserWorkloadsSecret( com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret value) { if (userWorkloadsSecretBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userWorkloadsSecret_ = value; } else { userWorkloadsSecretBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserWorkloadsSecret( com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret.Builder builderForValue) { if (userWorkloadsSecretBuilder_ == null) { userWorkloadsSecret_ = builderForValue.build(); } else { userWorkloadsSecretBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUserWorkloadsSecret( com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret value) { if (userWorkloadsSecretBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && userWorkloadsSecret_ != null && userWorkloadsSecret_ != com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret .getDefaultInstance()) { getUserWorkloadsSecretBuilder().mergeFrom(value); } else { userWorkloadsSecret_ = value; } } else { userWorkloadsSecretBuilder_.mergeFrom(value); } if (userWorkloadsSecret_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. 
User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUserWorkloadsSecret() { bitField0_ = (bitField0_ & ~0x00000002); userWorkloadsSecret_ = null; if (userWorkloadsSecretBuilder_ != null) { userWorkloadsSecretBuilder_.dispose(); userWorkloadsSecretBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret.Builder getUserWorkloadsSecretBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUserWorkloadsSecretFieldBuilder().getBuilder(); } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecretOrBuilder getUserWorkloadsSecretOrBuilder() { if (userWorkloadsSecretBuilder_ != null) { return userWorkloadsSecretBuilder_.getMessageOrBuilder(); } else { return userWorkloadsSecret_ == null ? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret .getDefaultInstance() : userWorkloadsSecret_; } } /** * * * <pre> * Required. User workloads Secret to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret, com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret.Builder, com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecretOrBuilder> getUserWorkloadsSecretFieldBuilder() { if (userWorkloadsSecretBuilder_ == null) { userWorkloadsSecretBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret, com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecret.Builder, com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsSecretOrBuilder>( getUserWorkloadsSecret(), getParentForChildren(), isClean()); userWorkloadsSecret_ = null; } return userWorkloadsSecretBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest) } // @@protoc_insertion_point(class_scope:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest) private static final com.google.cloud.orchestration.airflow.service.v1 .CreateUserWorkloadsSecretRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest(); } public static com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<CreateUserWorkloadsSecretRequest> PARSER = new com.google.protobuf.AbstractParser<CreateUserWorkloadsSecretRequest>() { @java.lang.Override public CreateUserWorkloadsSecretRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateUserWorkloadsSecretRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateUserWorkloadsSecretRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsSecretRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ozone
35,342
hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackAware.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdds.scm.container.placement.algorithms; import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState.DECOMMISSIONED; import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState.IN_SERVICE; import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.HEALTHY; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_DATANODE_RATIS_VOLUME_FREE_SPACE_MIN; import static org.apache.hadoop.hdds.scm.net.NetConstants.LEAF_SCHEMA; import static org.apache.hadoop.hdds.scm.net.NetConstants.RACK_SCHEMA; import static org.apache.hadoop.hdds.scm.net.NetConstants.ROOT_SCHEMA; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assumptions.assumeTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; 
import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.IntStream; import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.StorageUnit; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.MockDatanodeDetails; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.MetadataStorageReportProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.StorageReportProto; import org.apache.hadoop.hdds.scm.ContainerPlacementStatus; import org.apache.hadoop.hdds.scm.HddsTestUtils; import org.apache.hadoop.hdds.scm.exceptions.SCMException; import org.apache.hadoop.hdds.scm.net.NetConstants; import org.apache.hadoop.hdds.scm.net.NetworkTopology; import org.apache.hadoop.hdds.scm.net.NetworkTopologyImpl; import org.apache.hadoop.hdds.scm.net.NodeSchema; import org.apache.hadoop.hdds.scm.net.NodeSchemaManager; import org.apache.hadoop.hdds.scm.node.DatanodeInfo; import org.apache.hadoop.hdds.scm.node.NodeManager; import org.apache.hadoop.hdds.scm.node.NodeStatus; import org.apache.hadoop.ozone.container.upgrade.UpgradeUtils; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; /** * Test for the scm container rack aware placement. 
*/
public class TestSCMContainerPlacementRackAware {

  // Mock cluster state rebuilt for each test via setup().
  private NetworkTopology cluster;
  private OzoneConfiguration conf;
  private NodeManager nodeManager;
  private final List<DatanodeDetails> datanodes = new ArrayList<>();
  private final List<DatanodeInfo> dnInfos = new ArrayList<>();
  // policy with fallback capability
  private SCMContainerPlacementRackAware policy;
  // policy prohibit fallback
  private SCMContainerPlacementRackAware policyNoFallback;
  // node storage capacity
  private static final long STORAGE_CAPACITY = 100L;
  private SCMContainerPlacementMetrics metrics;
  private static final int NODE_PER_RACK = 5;

  /**
   * Cluster sizes driven through the {@code @MethodSource} tests: 3 to 15
   * datanodes, i.e. from a single partial rack up to three full racks
   * (NODE_PER_RACK == 5).
   */
  private static IntStream numDatanodes() {
    return IntStream.rangeClosed(3, 15);
  }

  /**
   * Builds a mock cluster of {@code datanodeCount} healthy in-service nodes
   * named "node{i}" placed on racks "/rack{i / NODE_PER_RACK}", registers
   * them with a mocked NodeManager, and creates the two policies under test
   * (with and without rack-rule fallback). Nodes at indices 2..4 (when the
   * cluster is large enough) are given reduced free space so that
   * capacity-aware selection is exercised.
   */
  private void setup(int datanodeCount) {
    //initialize network topology instance
    conf = new OzoneConfiguration();
    // We are using small units here
    conf.setStorageSize(OZONE_DATANODE_RATIS_VOLUME_FREE_SPACE_MIN, 1,
        StorageUnit.BYTES);
    NodeSchema[] schemas = new NodeSchema[]
        {ROOT_SCHEMA, RACK_SCHEMA, LEAF_SCHEMA};
    NodeSchemaManager.getInstance().init(schemas, true);
    cluster = new NetworkTopologyImpl(NodeSchemaManager.getInstance());

    // build datanodes, and network topology
    String rack = "/rack";
    String hostname = "node";
    for (int i = 0; i < datanodeCount; i++) {
      // Totally 3 racks, each has 5 datanodes
      DatanodeDetails datanodeDetails =
          MockDatanodeDetails.createDatanodeDetails(
              hostname + i, rack + (i / NODE_PER_RACK));
      datanodes.add(datanodeDetails);
      cluster.add(datanodeDetails);
      DatanodeInfo datanodeInfo = new DatanodeInfo(
          datanodeDetails, NodeStatus.inServiceHealthy(),
          UpgradeUtils.defaultLayoutVersionProto());

      // Every node starts with an empty 100-byte data and metadata volume.
      StorageReportProto storage1 = HddsTestUtils.createStorageReport(
          datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
          STORAGE_CAPACITY, 0, 100L, null);
      MetadataStorageReportProto metaStorage1 =
          HddsTestUtils.createMetadataStorageReport(
              "/metadata1-" + datanodeInfo.getID(),
              STORAGE_CAPACITY, 0, 100L, null);
      datanodeInfo.updateStorageReports(
          new ArrayList<>(Arrays.asList(storage1)));
      datanodeInfo.updateMetaDataStorageReports(
          new ArrayList<>(Arrays.asList(metaStorage1)));
      dnInfos.add(datanodeInfo);
    }

    if (datanodeCount > 4) {
      // Nodes 2, 3 and 4 report 90/80/70 used out of 100.
      StorageReportProto storage2 = HddsTestUtils.createStorageReport(
          dnInfos.get(2).getID(),
          "/data1-" + datanodes.get(2).getID(),
          STORAGE_CAPACITY, 90L, 10L, null);
      dnInfos.get(2).updateStorageReports(
          new ArrayList<>(Arrays.asList(storage2)));
      StorageReportProto storage3 = HddsTestUtils.createStorageReport(
          dnInfos.get(3).getID(),
          "/data1-" + dnInfos.get(3).getID(),
          STORAGE_CAPACITY, 80L, 20L, null);
      dnInfos.get(3).updateStorageReports(
          new ArrayList<>(Arrays.asList(storage3)));
      StorageReportProto storage4 = HddsTestUtils.createStorageReport(
          dnInfos.get(4).getID(),
          "/data1-" + dnInfos.get(4).getID(),
          STORAGE_CAPACITY, 70L, 30L, null);
      dnInfos.get(4).updateStorageReports(
          new ArrayList<>(Arrays.asList(storage4)));
    } else if (datanodeCount > 3) {
      // Only nodes 2 and 3 exist beyond the first pair: 90/80 used.
      StorageReportProto storage2 = HddsTestUtils.createStorageReport(
          dnInfos.get(2).getID(),
          "/data1-" + dnInfos.get(2).getID(),
          STORAGE_CAPACITY, 90L, 10L, null);
      dnInfos.get(2).updateStorageReports(
          new ArrayList<>(Arrays.asList(storage2)));
      StorageReportProto storage3 = HddsTestUtils.createStorageReport(
          dnInfos.get(3).getID(),
          "/data1-" + dnInfos.get(3).getID(),
          STORAGE_CAPACITY, 80L, 20L, null);
      dnInfos.get(3).updateStorageReports(
          new ArrayList<>(Arrays.asList(storage3)));
    } else if (datanodeCount > 2) {
      // Three-node cluster: only node 2 is loaded (84 used, 16 free).
      StorageReportProto storage2 = HddsTestUtils.createStorageReport(
          dnInfos.get(2).getID(),
          "/data1-" + dnInfos.get(2).getID(),
          STORAGE_CAPACITY, 84L, 16L, null);
      dnInfos.get(2).updateStorageReports(
          new ArrayList<>(Arrays.asList(storage2)));
    }

    // create mock node manager
    nodeManager = mock(NodeManager.class);
    when(nodeManager.getNodes(NodeStatus.inServiceHealthy()))
        .thenReturn(new ArrayList<>(datanodes));
    for (DatanodeInfo dn : dnInfos) {
      when(nodeManager.getNode(dn.getID()))
          .thenReturn(dn);
    }
    when(nodeManager.getClusterNetworkTopologyMap())
        .thenReturn(cluster);

    // create placement policy instances
    policy = new SCMContainerPlacementRackAware(
        nodeManager, conf, cluster, true, metrics);
    policyNoFallback = new SCMContainerPlacementRackAware(
        nodeManager, conf, cluster, false, metrics);
  }

  // Fresh metrics per test; setup() runs after this, so the policies see
  // the current metrics instance.
  @BeforeEach
  public void init() {
    metrics = SCMContainerPlacementMetrics.create();
  }

  @AfterEach
  public void teardown() {
    metrics.unRegister();
  }

  /**
   * New-pipeline placement with no exclusions: for 1-4 replicas, verifies
   * the chosen set size and the rack rule (first two replicas co-racked,
   * the rest on distinct racks). Rack-rule asserts are gated by
   * assumeTrue() for cluster shapes where the rule cannot hold.
   */
  @ParameterizedTest
  @MethodSource("numDatanodes")
  public void chooseNodeWithNoExcludedNodes(int datanodeCount)
      throws SCMException {
    setup(datanodeCount);
    // test choose new datanodes for new pipeline cases
    // 1 replica
    int nodeNum = 1;
    List<DatanodeDetails> datanodeDetails =
        policy.chooseDatanodes(null, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());

    // 2 replicas
    nodeNum = 2;
    datanodeDetails = policy.chooseDatanodes(null, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());
    // Same rack expected, unless the cluster ends in a single-node rack.
    assertTrue(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(1)) || (datanodeCount % NODE_PER_RACK == 1));

    // 3 replicas
    nodeNum = 3;
    datanodeDetails = policy.chooseDatanodes(null, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());
    // requires at least 2 racks for following statement
    assumeTrue(datanodeCount > NODE_PER_RACK &&
        datanodeCount % NODE_PER_RACK > 1);
    assertTrue(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(1)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(2)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(1),
        datanodeDetails.get(2)));

    // 4 replicas
    nodeNum = 4;
    datanodeDetails = policy.chooseDatanodes(null, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());
    // requires at least 2 racks and enough datanodes for following statement
    assumeTrue(datanodeCount > NODE_PER_RACK + 1);
    assertTrue(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(1)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(2)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(1),
        datanodeDetails.get(2)));
  }

  /**
   * Under-replicated-pipeline placement: chooses additional nodes while
   * honoring the rack rule relative to the already-placed (excluded)
   * replicas.
   */
  @ParameterizedTest
  @MethodSource("numDatanodes")
  public void chooseNodeWithExcludedNodes(int datanodeCount)
      throws SCMException {
    // test choose new datanodes for under replicated pipeline
    // 3 replicas, two existing datanodes on same rack
    assumeTrue(datanodeCount > NODE_PER_RACK);
    setup(datanodeCount);
    int nodeNum = 1;
    List<DatanodeDetails> excludedNodes = new ArrayList<>();
    excludedNodes.add(datanodes.get(0));
    excludedNodes.add(datanodes.get(1));
    List<DatanodeDetails> datanodeDetails = policy.chooseDatanodes(
        excludedNodes, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());
    // Third replica must land on a different rack than the co-racked pair.
    assertFalse(cluster.isSameParent(datanodeDetails.get(0),
        excludedNodes.get(0)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(0),
        excludedNodes.get(1)));

    // 3 replicas, one existing datanode
    nodeNum = 2;
    excludedNodes.clear();
    excludedNodes.add(datanodes.get(0));
    datanodeDetails = policy.chooseDatanodes(
        excludedNodes, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());
    // NOTE(review): excludedNodes holds a single element here after
    // clear()+add, so excludedNodes.get(1) below would throw
    // IndexOutOfBoundsException if the first isSameParent check is false;
    // it only passes today because || short-circuits. Verify whether the
    // second operand was meant to reference datanodeDetails.get(1) instead.
    assertTrue(cluster.isSameParent(
        datanodeDetails.get(0), excludedNodes.get(0)) ||
        cluster.isSameParent(datanodeDetails.get(0), excludedNodes.get(1)));

    // 3 replicas, two existing datanodes on different rack
    nodeNum = 1;
    excludedNodes.clear();
    excludedNodes.add(datanodes.get(0));
    excludedNodes.add(datanodes.get(5));
    datanodeDetails = policy.chooseDatanodes(
        excludedNodes, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());
    // New node must share a rack with one of the two existing replicas.
    assertTrue(cluster.isSameParent(
        datanodeDetails.get(0), excludedNodes.get(0)) ||
        cluster.isSameParent(datanodeDetails.get(0), excludedNodes.get(1)));
  }

  /**
   * With a trailing single-node rack excluded alongside node 0, the policy
   * must still pick a node co-racked with the remaining excluded replica.
   * Cluster sizes are chosen so datanodeCount % NODE_PER_RACK == 1.
   */
  @ParameterizedTest
  @ValueSource(ints = {NODE_PER_RACK + 1, 2 * NODE_PER_RACK + 1})
  public void testSingleNodeRack(int datanodeCount) throws SCMException {
    // make sure there is a single node rack
    assumeTrue(datanodeCount % NODE_PER_RACK == 1);
    setup(datanodeCount);
    List<DatanodeDetails> excludeNodes = new ArrayList<>();
    excludeNodes.add(datanodes.get(datanodeCount - 1));
    excludeNodes.add(datanodes.get(0));
    List<DatanodeDetails> chooseDatanodes =
        policy.chooseDatanodes(excludeNodes, null, 1, 0, 0);
    assertEquals(1, chooseDatanodes.size());
    // the selected node should be on the same rack as the second exclude node
    assertTrue(
        cluster.isSameParent(chooseDatanodes.get(0), excludeNodes.get(1)),
        chooseDatanodes.get(0).toString());
  }

  /**
   * Fallback-enabled policy must satisfy a 5-replica request on a 3-rack
   * cluster even though the 5th replica necessarily breaks the rack rule,
   * and the placement metrics must record the fallback.
   */
  @ParameterizedTest
  @ValueSource(ints = {12, 13, 14})
  public void testFallback(int datanodeCount) throws SCMException {
    // 5 replicas. there are only 3 racks. policy with fallback should
    // allocate the 5th datanode though it will break the rack rule(first
    // 2 replicas on same rack, others on different racks).
    assumeTrue(datanodeCount > NODE_PER_RACK * 2 &&
        (datanodeCount % NODE_PER_RACK > 1));
    setup(datanodeCount);
    int nodeNum = 5;
    List<DatanodeDetails> datanodeDetails =
        policy.chooseDatanodes(null, null, nodeNum, 0, 15);
    assertEquals(nodeNum, datanodeDetails.size());
    assertTrue(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(1)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(2)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(1),
        datanodeDetails.get(2)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(0),
        datanodeDetails.get(3)));
    assertFalse(cluster.isSameParent(datanodeDetails.get(2),
        datanodeDetails.get(3)));

    // get metrics
    long totalRequest = metrics.getDatanodeRequestCount();
    long successCount = metrics.getDatanodeChooseSuccessCount();
    long tryCount = metrics.getDatanodeChooseAttemptCount();
    long compromiseCount = metrics.getDatanodeChooseFallbackCount();

    // verify metrics
    assertEquals(totalRequest, nodeNum);
    assertEquals(successCount, nodeNum);
    assertThat(tryCount).isGreaterThan(nodeNum);
    assertThat(compromiseCount).isGreaterThanOrEqualTo(1);
  }

  // NOTE: method continues beyond this source chunk; body truncated here.
  @ParameterizedTest
  @ValueSource(ints = {11, 12, 13, 14, 15})
  public void testNoFallback(int datanodeCount) {
    assumeTrue(datanodeCount
> (NODE_PER_RACK * 2) && (datanodeCount <= NODE_PER_RACK * 3)); setup(datanodeCount); // 5 replicas. there are only 3 racks. policy prohibit fallback should fail. int nodeNum = 5; Exception e = assertThrows(Exception.class, () -> policyNoFallback.chooseDatanodes(null, null, nodeNum, 0, 15), "Fallback prohibited, this call should fail"); assertEquals("SCMException", e.getClass().getSimpleName()); // get metrics long totalRequest = metrics.getDatanodeRequestCount(); long successCount = metrics.getDatanodeChooseSuccessCount(); long tryCount = metrics.getDatanodeChooseAttemptCount(); long compromiseCount = metrics.getDatanodeChooseFallbackCount(); assertEquals(nodeNum, totalRequest); assertThat(successCount).withFailMessage("Not enough success count") .isGreaterThanOrEqualTo(1); assertThat(tryCount).withFailMessage("Not enough try count") .isGreaterThanOrEqualTo(1); assertEquals(0, compromiseCount); } @ParameterizedTest @MethodSource("numDatanodes") public void chooseNodeWithFavoredNodes(int datanodeCount) throws SCMException { setup(datanodeCount); int nodeNum = 1; List<DatanodeDetails> excludedNodes = new ArrayList<>(); List<DatanodeDetails> favoredNodes = new ArrayList<>(); // no excludedNodes, only favoredNodes favoredNodes.add(datanodes.get(0)); List<DatanodeDetails> datanodeDetails = policy.chooseDatanodes( excludedNodes, favoredNodes, nodeNum, 0, 15); assertEquals(nodeNum, datanodeDetails.size()); assertEquals(datanodeDetails.get(0).getNetworkFullPath(), favoredNodes.get(0).getNetworkFullPath()); // no overlap between excludedNodes and favoredNodes, favoredNodes can been // chosen. 
excludedNodes.clear(); favoredNodes.clear(); excludedNodes.add(datanodes.get(0)); favoredNodes.add(datanodes.get(2)); datanodeDetails = policy.chooseDatanodes( excludedNodes, favoredNodes, nodeNum, 0, 15); assertEquals(nodeNum, datanodeDetails.size()); assertEquals(datanodeDetails.get(0).getNetworkFullPath(), favoredNodes.get(0).getNetworkFullPath()); // there is overlap between excludedNodes and favoredNodes, favoredNodes // should not be chosen. excludedNodes.clear(); favoredNodes.clear(); excludedNodes.add(datanodes.get(0)); favoredNodes.add(datanodes.get(0)); datanodeDetails = policy.chooseDatanodes( excludedNodes, favoredNodes, nodeNum, 0, 15); assertEquals(nodeNum, datanodeDetails.size()); assertNotEquals(datanodeDetails.get(0).getNetworkFullPath(), favoredNodes.get(0).getNetworkFullPath()); } @ParameterizedTest @MethodSource("numDatanodes") public void testNoInfiniteLoop(int datanodeCount) { setup(datanodeCount); int nodeNum = 1; // request storage space larger than node capability Exception e = assertThrows(Exception.class, () -> policy.chooseDatanodes(null, null, nodeNum, STORAGE_CAPACITY + 0, 15), "Storage requested exceeds capacity, this call should fail"); assertEquals("SCMException", e.getClass().getSimpleName()); // get metrics long totalRequest = metrics.getDatanodeRequestCount(); long successCount = metrics.getDatanodeChooseSuccessCount(); long tryCount = metrics.getDatanodeChooseAttemptCount(); long compromiseCount = metrics.getDatanodeChooseFallbackCount(); assertEquals(totalRequest, nodeNum); assertEquals(successCount, 0); assertThat(tryCount).withFailMessage("Not enough try").isGreaterThanOrEqualTo(nodeNum); assertEquals(compromiseCount, 0); } @ParameterizedTest @MethodSource("numDatanodes") public void testDatanodeWithDefaultNetworkLocation(int datanodeCount) throws SCMException { setup(datanodeCount); String hostname = "node"; List<DatanodeInfo> dnInfoList = new ArrayList<>(); List<DatanodeDetails> dataList = new ArrayList<>(); NetworkTopology 
clusterMap = new NetworkTopologyImpl(NodeSchemaManager.getInstance()); for (int i = 0; i < 15; i++) { // Totally 3 racks, each has 5 datanodes DatanodeDetails dn = MockDatanodeDetails.createDatanodeDetails( hostname + i, null); DatanodeInfo dnInfo = new DatanodeInfo( dn, NodeStatus.inServiceHealthy(), UpgradeUtils.defaultLayoutVersionProto()); StorageReportProto storage1 = HddsTestUtils.createStorageReport( dnInfo.getID(), "/data1-" + dnInfo.getID(), STORAGE_CAPACITY, 0, 100L, null); MetadataStorageReportProto metaStorage1 = HddsTestUtils.createMetadataStorageReport( "/metadata1-" + dnInfo.getID(), STORAGE_CAPACITY, 0, 100L, null); dnInfo.updateStorageReports( new ArrayList<>(Arrays.asList(storage1))); dnInfo.updateMetaDataStorageReports( new ArrayList<>(Arrays.asList(metaStorage1))); dataList.add(dn); clusterMap.add(dn); dnInfoList.add(dnInfo); } assertEquals(dataList.size(), StringUtils.countMatches( clusterMap.toString(), NetConstants.DEFAULT_RACK)); for (DatanodeInfo dn: dnInfoList) { when(nodeManager.getNode(dn.getID())) .thenReturn(dn); } // choose nodes to host 3 replica int nodeNum = 3; SCMContainerPlacementRackAware newPolicy = new SCMContainerPlacementRackAware(nodeManager, conf, clusterMap, true, metrics); List<DatanodeDetails> datanodeDetails = newPolicy.chooseDatanodes(null, null, nodeNum, 0, 15); assertEquals(nodeNum, datanodeDetails.size()); assertTrue(cluster.isSameParent(datanodeDetails.get(0), datanodeDetails.get(1))); assertTrue(cluster.isSameParent(datanodeDetails.get(0), datanodeDetails.get(2))); assertTrue(cluster.isSameParent(datanodeDetails.get(1), datanodeDetails.get(2))); } @Test public void testvalidateContainerPlacement() { // Only run this test for the full set of DNs. 5 DNs per rack on 3 racks. 
final int datanodeCount = 15; setup(datanodeCount); List<DatanodeDetails> dns = new ArrayList<>(); // First 5 node are on the same rack dns.add(datanodes.get(0)); dns.add(datanodes.get(1)); dns.add(datanodes.get(2)); ContainerPlacementStatus stat = policy.validateContainerPlacement(dns, 3); assertFalse(stat.isPolicySatisfied()); assertEquals(1, stat.misReplicationCount()); // Pick a new list which spans 2 racks dns = new ArrayList<>(); dns.add(datanodes.get(0)); dns.add(datanodes.get(1)); dns.add(datanodes.get(5)); // This is on second rack stat = policy.validateContainerPlacement(dns, 3); assertTrue(stat.isPolicySatisfied()); assertEquals(0, stat.misReplicationCount()); // Pick single DN, expecting 3 replica. Policy is not met. dns = new ArrayList<>(); dns.add(datanodes.get(0)); stat = policy.validateContainerPlacement(dns, 3); assertFalse(stat.isPolicySatisfied()); assertEquals(1, stat.misReplicationCount()); // Pick single DN, expecting 1 replica. Policy is met. dns = new ArrayList<>(); dns.add(datanodes.get(0)); stat = policy.validateContainerPlacement(dns, 1); assertTrue(stat.isPolicySatisfied()); assertEquals(0, stat.misReplicationCount()); } @Test public void testvalidateContainerPlacementSingleRackCluster() { final int datanodeCount = 5; setup(datanodeCount); // All nodes are on the same rack in this test, and the cluster only has // one rack. List<DatanodeDetails> dns = new ArrayList<>(); dns.add(datanodes.get(0)); dns.add(datanodes.get(1)); dns.add(datanodes.get(2)); ContainerPlacementStatus stat = policy.validateContainerPlacement(dns, 3); assertTrue(stat.isPolicySatisfied()); assertEquals(0, stat.misReplicationCount()); // Single DN - policy met as cluster only has one rack. 
dns = new ArrayList<>(); dns.add(datanodes.get(0)); stat = policy.validateContainerPlacement(dns, 3); assertTrue(stat.isPolicySatisfied()); assertEquals(0, stat.misReplicationCount()); // Single DN - only 1 replica expected dns = new ArrayList<>(); dns.add(datanodes.get(0)); stat = policy.validateContainerPlacement(dns, 1); assertTrue(stat.isPolicySatisfied()); assertEquals(0, stat.misReplicationCount()); } @ParameterizedTest @MethodSource("org.apache.hadoop.hdds.scm.node.NodeStatus#outOfServiceStates") public void testOverReplicationAndOutOfServiceNodes(HddsProtos.NodeOperationalState state) { setup(7); // 7 datanodes, all nodes are used. // /rack0/node0 -> IN_SERVICE // /rack0/node1 -> IN_SERVICE // /rack0/node2 -> OFFLINE // /rack0/node3 -> OFFLINE // /rack0/node4 -> OFFLINE // /rack1/node5 -> IN_SERVICE // /rack1/node6 -> OFFLINE datanodes.get(2).setPersistedOpState(state); datanodes.get(3).setPersistedOpState(state); datanodes.get(4).setPersistedOpState(state); datanodes.get(6).setPersistedOpState(state); List<DatanodeDetails> dns = new ArrayList<>(datanodes); ContainerPlacementStatus status = policy.validateContainerPlacement(dns, 3); assertTrue(status.isPolicySatisfied()); assertEquals(2, status.actualPlacementCount()); assertEquals(2, status.expectedPlacementCount()); assertEquals(0, status.misReplicationCount()); assertNull(status.misReplicatedReason()); // /rack0/node0 -> IN_SERVICE // /rack0/node1 -> IN_SERVICE // /rack0/node2 -> OFFLINE > IN_SERVICE // /rack0/node3 -> OFFLINE // /rack0/node4 -> OFFLINE // /rack1/node5 -> IN_SERVICE // /rack1/node6 -> OFFLINE > IN_SERVICE datanodes.get(2).setPersistedOpState(IN_SERVICE); datanodes.get(6).setPersistedOpState(IN_SERVICE); dns = new ArrayList<>(datanodes); status = policy.validateContainerPlacement(dns, 3); assertTrue(status.isPolicySatisfied()); assertEquals(2, status.actualPlacementCount()); assertEquals(2, status.expectedPlacementCount()); assertEquals(0, status.misReplicationCount()); 
assertNull(status.misReplicatedReason()); } @ParameterizedTest @MethodSource("numDatanodes") public void testOutOfServiceNodesNotSelected(int datanodeCount) { setup(datanodeCount); // Set all the nodes to out of service for (DatanodeInfo dn : dnInfos) { dn.setNodeStatus(NodeStatus.valueOf(DECOMMISSIONED, HEALTHY)); } for (int i = 0; i < 10; i++) { // Set a random DN to in_service and ensure it is always picked int index = RandomUtils.secure().randomInt(0, dnInfos.size()); dnInfos.get(index).setNodeStatus(NodeStatus.inServiceHealthy()); try { List<DatanodeDetails> datanodeDetails = policy.chooseDatanodes(null, null, 1, 0, 0); assertEquals(dnInfos.get(index), datanodeDetails.get(0)); } catch (SCMException e) { // If we get SCMException: No satisfied datanode to meet the ... this is // ok, as there is only 1 IN_SERVICE node and with the retry logic we // may never find it. } dnInfos.get(index).setNodeStatus(NodeStatus.valueOf(DECOMMISSIONED, HEALTHY)); } } @ParameterizedTest @ValueSource(ints = {NODE_PER_RACK + 1, 2 * NODE_PER_RACK + 1}) public void chooseNodeWithUsedNodesMultipleRack(int datanodeCount) throws SCMException { assumeTrue(datanodeCount > NODE_PER_RACK); setup(datanodeCount); int nodeNum = 1; List<DatanodeDetails> excludedNodes = new ArrayList<>(); List<DatanodeDetails> usedNodes = new ArrayList<>(); // 2 replicas, two existing datanodes on same rack usedNodes.add(datanodes.get(0)); usedNodes.add(datanodes.get(1)); List<DatanodeDetails> datanodeDetails = policy.chooseDatanodes(usedNodes, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); // New DN should be on different rack than DN0 & DN1 assertTrue(!cluster.isSameParent( datanodes.get(0), datanodeDetails.get(0)) && !cluster.isSameParent(datanodes.get(1), datanodeDetails.get(0))); // 2 replicas, two existing datanodes on different rack usedNodes.clear(); // 1st Replica on rack0 usedNodes.add(datanodes.get(0)); // 2nd Replica on rack1 usedNodes.add(datanodes.get(5)); 
datanodeDetails = policy.chooseDatanodes(usedNodes, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); // New replica should be either on rack0 or rack1 assertTrue(cluster.isSameParent( datanodes.get(0), datanodeDetails.get(0)) || cluster.isSameParent(datanodes.get(5), datanodeDetails.get(0))); } @Test public void chooseSingleNodeRackWithUsedAndExcludeNodes() throws SCMException { int datanodeCount = 5; setup(datanodeCount); int nodeNum = 1; List<DatanodeDetails> excludedNodes = new ArrayList<>(); List<DatanodeDetails> usedNodes = new ArrayList<>(); // 2 replicas, two existing datanodes on same rack usedNodes.add(datanodes.get(0)); usedNodes.add(datanodes.get(1)); excludedNodes.add(datanodes.get(2)); List<DatanodeDetails> datanodeDetails = policy.chooseDatanodes(usedNodes, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); assertTrue(cluster.isSameParent(datanodes.get(0), datanodeDetails.get(0))); assertTrue(cluster.isSameParent(datanodes.get(1), datanodeDetails.get(0))); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(0)); // Required 2 DN for 2 replica nodeNum = 2; // One replica exist usedNodes.clear(); // 1st Replica on rack0 usedNodes.add(datanodes.get(0)); datanodeDetails = policy.chooseDatanodes(usedNodes, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(1)); nodeNum = 3; // No replica exist usedNodes.clear(); datanodeDetails = policy.chooseDatanodes(usedNodes, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(1)); } @ParameterizedTest @MethodSource("numDatanodes") public void chooseNodeWithUsedAndExcludeNodesMultipleRack(int datanodeCount) throws SCMException { 
assumeTrue(datanodeCount > NODE_PER_RACK); setup(datanodeCount); int nodeNum = 2; List<DatanodeDetails> excludedNodes = new ArrayList<>(); List<DatanodeDetails> usedNodes = new ArrayList<>(); // 1 replica usedNodes.add(datanodes.get(0)); // 1 exclude node excludedNodes.add(datanodes.get(1)); List<DatanodeDetails> datanodeDetails = policy.chooseDatanodes(usedNodes, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); // Exclude node should not be returned assertNotEquals(excludedNodes.get(0), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(1)); usedNodes.clear(); excludedNodes.clear(); // 1 replica // Multiple exclude nodes usedNodes.add(datanodes.get(0)); excludedNodes.add(datanodes.get(1)); excludedNodes.add(datanodes.get(2)); datanodeDetails = policy.chooseDatanodes(usedNodes, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(1), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(1)); assertNotEquals(excludedNodes.get(1), datanodeDetails.get(1)); } @ParameterizedTest @MethodSource("numDatanodes") public void chooseNodeWithOnlyExcludeAndNoUsedNodes(int datanodeCount) throws SCMException { assumeTrue(datanodeCount > NODE_PER_RACK); setup(datanodeCount); int nodeNum = 3; List<DatanodeDetails> excludedNodes = new ArrayList<>(); // 1 exclude node excludedNodes.add(datanodes.get(1)); List<DatanodeDetails> datanodeDetails = policy.chooseDatanodes(null, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); // Exclude node should not be returned assertNotEquals(excludedNodes.get(0), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(1)); excludedNodes.clear(); // Multiple exclude nodes excludedNodes.add(datanodes.get(1)); excludedNodes.add(datanodes.get(2)); datanodeDetails = 
policy.chooseDatanodes(null, excludedNodes, null, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); // Exclude node should not be returned assertNotEquals(excludedNodes.get(0), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(0), datanodeDetails.get(1)); assertNotEquals(excludedNodes.get(1), datanodeDetails.get(0)); assertNotEquals(excludedNodes.get(1), datanodeDetails.get(1)); } @ParameterizedTest @ValueSource(ints = {11, 12, 13, 14, 15}) public void testNoFallbackWithUsedNodes(int datanodeCount) { assumeTrue(datanodeCount > (NODE_PER_RACK * 2) && (datanodeCount <= NODE_PER_RACK * 3)); setup(datanodeCount); List<DatanodeDetails> usedNodes = new ArrayList<>(); usedNodes.add(datanodes.get(0)); // 5 replicas. there are only 3 racks. policy prohibit fallback should fail. int nodeNum = 5; Exception e = assertThrows(Exception.class, () -> policyNoFallback.chooseDatanodes(usedNodes, null, null, nodeNum, 0, 15), "Fallback prohibited, this call should fail"); assertEquals("SCMException", e.getClass().getSimpleName()); // get metrics long totalRequest = metrics.getDatanodeRequestCount(); long successCount = metrics.getDatanodeChooseSuccessCount(); long tryCount = metrics.getDatanodeChooseAttemptCount(); long compromiseCount = metrics.getDatanodeChooseFallbackCount(); assertEquals(nodeNum, totalRequest); assertThat(successCount).withFailMessage("Not enough success count") .isGreaterThanOrEqualTo(1); assertThat(tryCount).withFailMessage("Not enough try count") .isGreaterThanOrEqualTo(1); assertEquals(0, compromiseCount); } @Test public void chooseNodeWithUsedAndFavouredNodesMultipleRack() throws SCMException { int datanodeCount = 12; setup(datanodeCount); int nodeNum = 1; List<DatanodeDetails> usedNodes = new ArrayList<>(); List<DatanodeDetails> favouredNodes = new ArrayList<>(); // 2 replica usedNodes.add(datanodes.get(0)); usedNodes.add(datanodes.get(1)); // 1 favoured node favouredNodes.add(datanodes.get(2)); List<DatanodeDetails> datanodeDetails = 
policy.chooseDatanodes(usedNodes, null, favouredNodes, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); // Favoured node should not be returned, // Returned node should be on the different rack than the favoured node. assertFalse(cluster.isSameParent( favouredNodes.get(0), datanodeDetails.get(0))); favouredNodes.clear(); // 1 favoured node favouredNodes.add(datanodes.get(6)); datanodeDetails = policy.chooseDatanodes(usedNodes, null, favouredNodes, nodeNum, 0, 5); assertEquals(nodeNum, datanodeDetails.size()); // Favoured node should be returned, // as favoured node is in the different rack as used nodes. assertEquals(favouredNodes.get(0), datanodeDetails.get(0)); } @Test public void testSourceDatanodeIsNotChosenAsTarget() { setup(2); List<DatanodeDetails> usedNodes = new ArrayList<>(); usedNodes.add(datanodes.get(0)); dnInfos.get(1).setNodeStatus(NodeStatus.inServiceHealthyReadOnly()); assertThrows(SCMException.class, () -> policy.chooseDatanodes(usedNodes, null, null, 1, 0, 0), "No target datanode, this call should fail"); } }
apache/synapse
35,405
modules/transports/optional/fix/src/main/java/org/apache/synapse/transport/fix/FIXUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.synapse.transport.fix; import org.apache.axiom.attachments.ByteArrayDataSource; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMNamespace; import org.apache.axiom.soap.SOAPBody; import org.apache.axiom.soap.SOAPEnvelope; import org.apache.axiom.soap.SOAPFactory; import org.apache.axis2.AxisFault; import org.apache.axis2.context.MessageContext; import org.apache.axis2.transport.base.BaseUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import quickfix.*; import quickfix.field.*; import javax.activation.DataHandler; import javax.activation.DataSource; import javax.xml.namespace.QName; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.*; public class FIXUtils { private static final Log log = LogFactory.getLog(FIXUtils.class); private static FIXUtils _instance = new FIXUtils(); public static FIXUtils getInstance() { return _instance; } /** * FIX messages are non-XML. So convert them into XML using the AXIOM API. 
* Put the FIX message into an Axis2 MessageContext.The basic format of the * generated SOAP envelope; * <p/> * <soapEnvelope> * <soapBody> * <message> * <header> ....</header> * <body> .... </body> * <trailer> .... </trailer> * </message> * </soapBody> * </soapEnvelope> * * @param message the FIX message * @param counter application level sequence number of the message * @param sessionID the incoming session * @param msgCtx the Axis2 MessageContext to hold the FIX message * @throws AxisFault the exception thrown when invalid soap envelopes are set to the msgCtx */ public void setSOAPEnvelope(Message message, int counter, String sessionID, MessageContext msgCtx) throws AxisFault { if (log.isDebugEnabled()) { log.debug("Creating SOAP envelope for FIX message..."); } SOAPFactory soapFactory = OMAbstractFactory.getSOAP11Factory(); OMElement msg = soapFactory.createOMElement(FIXConstants.FIX_MESSAGE, null); msg.addAttribute(soapFactory.createOMAttribute(FIXConstants.FIX_MESSAGE_INCOMING_SESSION, null, sessionID)); msg.addAttribute(soapFactory.createOMAttribute (FIXConstants.FIX_MESSAGE_COUNTER, null, String.valueOf(counter))); OMElement header = soapFactory.createOMElement(FIXConstants.FIX_HEADER, null); OMElement body = soapFactory.createOMElement(FIXConstants.FIX_BODY, null); OMElement trailer = soapFactory.createOMElement(FIXConstants.FIX_TRAILER, null); //process FIX header Iterator<Field<?>> iter = message.getHeader().iterator(); if (iter != null) { while (iter.hasNext()) { Field<?> field = iter.next(); OMElement msgField = soapFactory.createOMElement(FIXConstants.FIX_FIELD, null); msgField.addAttribute(soapFactory.createOMAttribute(FIXConstants.FIX_FIELD_ID, null, String.valueOf(field.getTag()))); Object value = field.getObject(); if (value instanceof byte[]) { DataSource dataSource = new ByteArrayDataSource((byte[]) value); DataHandler dataHandler = new DataHandler(dataSource); String contentID = msgCtx.addAttachment(dataHandler); OMElement binaryData = 
soapFactory.createOMElement( FIXConstants.FIX_BINARY_FIELD, null); String binaryCID = "cid:" + contentID; binaryData.addAttribute(FIXConstants.FIX_MESSAGE_REFERENCE, binaryCID, null); msgField.addChild(binaryData); } else { createOMText(soapFactory, msgField, value.toString()); } header.addChild(msgField); } } //process FIX body convertFIXBodyToXML(message, body, soapFactory, msgCtx); //process FIX trailer iter = message.getTrailer().iterator(); if (iter != null) { while (iter.hasNext()) { Field<?> field = iter.next(); OMElement msgField = soapFactory.createOMElement(FIXConstants.FIX_FIELD, null); msgField.addAttribute(soapFactory. createOMAttribute(FIXConstants.FIX_FIELD_ID, null, String.valueOf(field.getTag()))); Object value = field.getObject(); if (value instanceof byte[]) { DataSource dataSource = new ByteArrayDataSource((byte[]) value); DataHandler dataHandler = new DataHandler(dataSource); String contentID = msgCtx.addAttachment(dataHandler); OMElement binaryData = soapFactory.createOMElement( FIXConstants.FIX_BINARY_FIELD, null); String binaryCID = "cid:" + contentID; binaryData.addAttribute(FIXConstants.FIX_MESSAGE_REFERENCE, binaryCID, null); msgField.addChild(binaryData); } else { createOMText(soapFactory, msgField, value.toString()); } trailer.addChild(msgField); } } msg.addChild(header); msg.addChild(body); msg.addChild(trailer); SOAPEnvelope envelope = soapFactory.getDefaultEnvelope(); envelope.getBody().addChild(msg); msgCtx.setEnvelope(envelope); } /** * Constructs the XML infoset for the FIX message body * * @param message the FIX message * @param body the body element of the XML infoset * @param soapFactory the SOAP factory to create XML elements * @param msgCtx the Axis2 Message context * @throws AxisFault on error */ private void convertFIXBodyToXML(FieldMap message, OMElement body, SOAPFactory soapFactory, MessageContext msgCtx) throws AxisFault{ if (log.isDebugEnabled()) { log.debug("Generating FIX message body (Message ID: " + 
msgCtx.getMessageID() + ")"); } Iterator<Field<?>> iter = message.iterator(); if (iter != null) { while (iter.hasNext()) { Field<?> field = iter.next(); OMElement msgField = soapFactory.createOMElement(FIXConstants.FIX_FIELD, null); msgField.addAttribute(soapFactory. createOMAttribute(FIXConstants.FIX_FIELD_ID, null, String.valueOf(field.getTag()))); Object value = field.getObject(); if (value instanceof byte[]) { DataSource dataSource = new ByteArrayDataSource((byte[]) value); DataHandler dataHandler = new DataHandler(dataSource); String contentID = msgCtx.addAttachment(dataHandler); OMElement binaryData = soapFactory.createOMElement( FIXConstants.FIX_BINARY_FIELD, null); String binaryCID = "cid:" + contentID; binaryData.addAttribute(FIXConstants.FIX_MESSAGE_REFERENCE, binaryCID, null); msgField.addChild(binaryData); } else { createOMText(soapFactory, msgField, value.toString()); } body.addChild(msgField); } } //process FIX repeating groups Iterator<Integer> groupKeyItr = message.groupKeyIterator(); if (groupKeyItr != null) { while (groupKeyItr.hasNext()) { int groupKey = groupKeyItr.next(); OMElement groupsField = soapFactory.createOMElement(FIXConstants.FIX_GROUPS, null); groupsField.addAttribute(FIXConstants.FIX_FIELD_ID, String.valueOf(groupKey),null); List<Group> groupList = message.getGroups(groupKey); Iterator<Group> groupIterator = groupList.iterator(); while (groupIterator.hasNext()) { Group msgGroup = groupIterator.next(); OMElement groupField = soapFactory.createOMElement(FIXConstants.FIX_GROUP, null); // rec. 
call the method to process the repeating groups convertFIXBodyToXML(msgGroup, groupField, soapFactory, msgCtx); groupsField.addChild(groupField); } body.addChild(groupsField); } } } private void generateFIXBody(OMElement node, FieldMap message, MessageContext msgCtx, boolean withNs, String nsURI, String nsPrefix) throws IOException { Iterator bodyElements = node.getChildElements(); while (bodyElements.hasNext()) { OMElement bodyNode = (OMElement) bodyElements.next(); String nodeLocalName = bodyNode.getLocalName(); //handle repeating groups if (nodeLocalName.equals(FIXConstants.FIX_GROUPS)){ int groupsKey = Integer.parseInt(bodyNode.getAttributeValue( new QName(FIXConstants.FIX_FIELD_ID))); Group group; Iterator groupElements = bodyNode.getChildElements(); while (groupElements.hasNext()){ OMElement groupNode = (OMElement) groupElements.next(); Iterator groupFields = groupNode.getChildrenWithName(new QName(FIXConstants.FIX_FIELD)); List<Integer> idList = new ArrayList<Integer>(); while (groupFields.hasNext()) { OMElement fieldNode = (OMElement) groupFields.next(); idList.add(Integer.parseInt(fieldNode.getAttributeValue( new QName(FIXConstants.FIX_FIELD_ID)))); } int[] order = new int[idList.size()]; for (int i = 0; i < order.length; i++) { order[i] = idList.get(i); } group = new Group(groupsKey, order[0], order); generateFIXBody(groupNode, group, msgCtx, withNs, nsURI, nsPrefix); message.addGroup(group); } } else { String tag; if (withNs) { tag = bodyNode.getAttributeValue(new QName(nsURI, FIXConstants.FIX_FIELD_ID, nsPrefix)); } else { tag = bodyNode.getAttributeValue(new QName(FIXConstants.FIX_FIELD_ID)); } String value = null; OMElement child = bodyNode.getFirstElement(); if (child != null) { String href; if (withNs) { href = bodyNode.getFirstElement(). getAttributeValue(new QName(nsURI, FIXConstants.FIX_FIELD_ID, nsPrefix)) ; } else { href = bodyNode.getFirstElement(). 
getAttributeValue(new QName(FIXConstants.FIX_MESSAGE_REFERENCE)); } if (href != null) { DataHandler binaryDataHandler = msgCtx.getAttachment(href.substring(4)); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); binaryDataHandler.writeTo(outputStream); value = new String(outputStream.toByteArray()); } } else { value = bodyNode.getText(); } if (value != null) { message.setString(Integer.parseInt(tag), value); } } } } /** * Extract the FIX message embedded in an Axis2 MessageContext * * @param msgCtx the Axis2 MessageContext * @return a FIX message * @throws java.io.IOException the exception thrown when handling erroneous binary content */ public Message createFIXMessage(MessageContext msgCtx) throws IOException { if (log.isDebugEnabled()) { log.debug("Extracting FIX message from the message context (Message ID: " + msgCtx.getMessageID() + ")"); } boolean withNs = false; String nsPrefix = null; String nsURI = null; Message message = new Message(); SOAPBody soapBody = msgCtx.getEnvelope().getBody(); //find namespace information embedded in the FIX payload OMNamespace ns = getNamespaceOfFIXPayload(soapBody); if (ns != null) { withNs = true; nsPrefix = ns.getPrefix(); nsURI = ns.getNamespaceURI(); } OMElement messageNode; if (withNs) { messageNode = soapBody.getFirstChildWithName(new QName(nsURI, FIXConstants.FIX_MESSAGE, nsPrefix)); } else { messageNode = soapBody.getFirstChildWithName(new QName(FIXConstants.FIX_MESSAGE)); } Iterator messageElements = messageNode.getChildElements(); while (messageElements.hasNext()) { OMElement node = (OMElement) messageElements.next(); //create FIX header if (node.getQName().getLocalPart().equals(FIXConstants.FIX_HEADER)) { Iterator headerElements = node.getChildElements(); while (headerElements.hasNext()) { OMElement headerNode = (OMElement) headerElements.next(); String tag; if (withNs) { tag = headerNode.getAttributeValue(new QName(nsURI, FIXConstants.FIX_FIELD_ID, nsPrefix)); } else { tag = 
headerNode.getAttributeValue(new QName(FIXConstants.FIX_FIELD_ID)); } String value = null; OMElement child = headerNode.getFirstElement(); if (child != null) { String href; if (withNs) { href = headerNode.getFirstElement().getAttributeValue( new QName(nsURI, FIXConstants.FIX_MESSAGE_REFERENCE, nsPrefix)); } else { href = headerNode.getFirstElement(). getAttributeValue(new QName(FIXConstants.FIX_MESSAGE_REFERENCE)); } if (href != null) { DataHandler binaryDataHandler = msgCtx.getAttachment(href.substring(4)); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); binaryDataHandler.writeTo(outputStream); value = new String(outputStream.toByteArray()); } } else { value = headerNode.getText(); } if (value != null) { message.getHeader().setString(Integer.parseInt(tag), value); } } } else if (node.getQName().getLocalPart().equals(FIXConstants.FIX_BODY)) { //create FIX body generateFIXBody(node, message, msgCtx, withNs, nsURI, nsPrefix); } else if (node.getQName().getLocalPart().equals(FIXConstants.FIX_TRAILER)) { //create FIX trailer Iterator trailerElements = node.getChildElements(); while (trailerElements.hasNext()) { OMElement trailerNode = (OMElement) trailerElements.next(); String tag; if (withNs) { tag = trailerNode.getAttributeValue(new QName(nsURI, FIXConstants.FIX_FIELD_ID, nsPrefix)); } else { tag = trailerNode.getAttributeValue(new QName(FIXConstants.FIX_FIELD_ID)); } String value = null; OMElement child = trailerNode.getFirstElement(); if (child != null) { String href; if (withNs) { href = trailerNode.getFirstElement().getAttributeValue( new QName(nsURI, FIXConstants.FIX_FIELD_ID, nsPrefix)); } else { href = trailerNode.getFirstElement(). 
getAttributeValue(new QName(FIXConstants.FIX_MESSAGE_REFERENCE)); } if (href != null) { DataHandler binaryDataHandler = msgCtx.getAttachment(href.substring(4)); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); binaryDataHandler.writeTo(outputStream); value = new String(outputStream.toByteArray()); } } else { value = trailerNode.getText(); } if (value != null) { message.getTrailer().setString(Integer.parseInt(tag), value); } } } } return message; } /** * Generate EPRs for the specified FIX service. A FIX end point can be uniquely * identified by a <host(IP), port> pair. Add some additional FIX session details * so the EPRs are more self descriptive. * A FIX EPR generated here looks like; * fix://10.100.1.80:9898?BeginString=FIX.4.4&SenderCompID=BANZAI&TargetCompID=EXEC& * SessionQualifier=mySession&Serviec=StockQuoteProxy * * @param acceptor the SocketAcceptor associated with the service * @param serviceName the name of the service * @param ip the IP address of the host * @return an array of EPRs for the specified service in String format */ public static String[] generateEPRs(SocketAcceptor acceptor, String serviceName, String ip) { //Get all the addresses associated with the acceptor Map<SessionID, SocketAddress> socketAddresses = acceptor.getAcceptorAddresses(); //Get all the sessions (SessionIDs) associated with the acceptor ArrayList<SessionID> sessions = acceptor.getSessions(); String[] EPRList = new String[sessions.size()]; //Generate an EPR for each session/socket address for (int i = 0; i < sessions.size(); i++) { SessionID sessionID = sessions.get(i); InetSocketAddress socketAddress = (InetSocketAddress) socketAddresses.get(sessionID); EPRList[i] = FIXConstants.FIX_PREFIX + ip + ":" + socketAddress.getPort() + "?" 
+ FIXConstants.BEGIN_STRING + "=" + sessionID.getBeginString() + "&" + FIXConstants.SENDER_COMP_ID + "=" + sessionID.getTargetCompID() + "&" + FIXConstants.TARGET_COMP_ID + "=" + sessionID.getSenderCompID(); String sessionQualifier = sessionID.getSessionQualifier(); if (sessionQualifier != null && !sessionQualifier.equals("")) { EPRList[i] += "&" + FIXConstants.SESSION_QUALIFIER + "=" + sessionQualifier; } String senderSubID = sessionID.getSenderSubID(); if (senderSubID != null && !senderSubID.equals("")) { EPRList[i] += "&" + FIXConstants.SENDER_SUB_ID + "=" + senderSubID; } String targetSubID = sessionID.getTargetSubID(); if (targetSubID != null && !targetSubID.equals("")) { EPRList[i] += "&" + FIXConstants.TARGET_SUB_ID + "=" + targetSubID; } String senderLocationID = sessionID.getSenderLocationID(); if (senderLocationID != null && !senderLocationID.equals("")) { EPRList[i] += "&" + FIXConstants.SENDER_LOCATION_ID + "=" + senderLocationID; } String targetLocationID = sessionID.getTargetLocationID(); if (targetLocationID != null && !targetLocationID.equals("")) { EPRList[i] += "&" + FIXConstants.TARGET_LOCATION_ID + "=" + targetLocationID; } EPRList[i] += "&Service=" + serviceName; } return EPRList; } public static String[] getEPRs(SessionSettings settings) throws FieldConvertError, ConfigError { Iterator<SessionID> sessions = settings.sectionIterator(); String[] EPRs = new String[settings.size()]; int i = 0; while (sessions.hasNext()) { SessionID session = sessions.next(); String EPR = FIXConstants.FIX_PREFIX; String paramValue; EPR += settings.getString(session, FIXConstants.SOCKET_CONNECT_HOST); EPR += ":" + settings.getString(session, FIXConstants.SOCKET_CONNECT_PORT); EPR += "?" 
+ FIXConstants.BEGIN_STRING + "="; EPR += settings.getString(session, FIXConstants.BEGIN_STRING); EPR += "&" + FIXConstants.SENDER_COMP_ID + "="; EPR += settings.getString(session, FIXConstants.SENDER_COMP_ID); EPR += "&" + FIXConstants.TARGET_COMP_ID + "="; EPR += settings.getString(session, FIXConstants.TARGET_COMP_ID); try { paramValue = settings.getString(session, FIXConstants.SENDER_SUB_ID); if (paramValue != null) { EPR += "&" + FIXConstants.SENDER_SUB_ID + "="; EPR += paramValue; } } catch (ConfigError ignore) { } try { paramValue = settings.getString(session, FIXConstants.SENDER_LOCATION_ID); if (paramValue != null) { EPR += "&" + FIXConstants.SENDER_LOCATION_ID + "="; EPR += paramValue; } } catch (ConfigError ignore) { } try { paramValue = settings.getString(session, FIXConstants.TARGET_SUB_ID); if (paramValue != null) { EPR += "&" + FIXConstants.TARGET_SUB_ID + "="; EPR += paramValue; } } catch (ConfigError ignore) { } try { paramValue = settings.getString(session, FIXConstants.TARGET_LOCATION_ID); if (paramValue != null) { EPR += "&" + FIXConstants.TARGET_LOCATION_ID + "="; EPR += paramValue; } } catch (ConfigError ignore) { } EPRs[i] = EPR; } return EPRs; } /** * Compares two given FIX URL strings. The second URL is considered equal to the * first URL if all the properties in the first URL also exist in the second URL * and if they have equals values. 
* * @param url1 a FIX URL String * @param url2 a FIX URL String * @return a boolean value */ public static boolean compareURLs(String url1, String url2) { if (!url1.substring(0, url1.indexOf("?")).equals(url2.substring(0, url2.indexOf("?")))) { return false; } else { Hashtable<String,String> properties1 = BaseUtils.getEPRProperties(url1); Hashtable<String, String> properties2 = BaseUtils.getEPRProperties(url2); for (Map.Entry<String,String> entry : properties1.entrySet()) { if (!properties2.containsKey(entry.getKey())) { return false; } else if (!properties1.get(entry.getKey()).equals(entry.getValue())) { return false; } } } return true; } /* * This is here because AXIOM does not support removing CDATA tags yet. Given a String embedded in * CDATA tags this method will return the String element only. * * @param str the String with CDATA tags * @return String with CDATA tags stripped * private static String removeCDATA(String str) { if (str.indexOf("<![CDATA[") != -1) { str = str.split("CDATA")[1].split("]></field>")[0]; str= str.substring(1, str.length()-1); return str; } else { return str; } }*/ /** * Extracts the fields related to message forwarding (third party routing) from * the FIX header. 
* * @param message the FIX message * @return a Map of forwarding parameters */ public static Map<String, String> getMessageForwardingParameters(Message message) { Map<String, String> map = new HashMap<String, String>(); String value = getHeaderFieldValue(message, BeginString.FIELD); map.put(FIXConstants.BEGIN_STRING, value); value = getHeaderFieldValue(message, SenderCompID.FIELD); map.put(FIXConstants.SENDER_COMP_ID, value); value = getHeaderFieldValue(message, SenderSubID.FIELD); map.put(FIXConstants.SENDER_SUB_ID, value); value = getHeaderFieldValue(message, SenderLocationID.FIELD); map.put(FIXConstants.SENDER_LOCATION_ID, value); value = getHeaderFieldValue(message, TargetCompID.FIELD); map.put(FIXConstants.TARGET_COMP_ID, value); value = getHeaderFieldValue(message, DeliverToCompID.FIELD); map.put(FIXConstants.DELIVER_TO_COMP_ID, value); value = getHeaderFieldValue(message, DeliverToSubID.FIELD); map.put(FIXConstants.DELIVER_TO_SUB_ID, value); value = getHeaderFieldValue(message, DeliverToLocationID.FIELD); map.put(FIXConstants.DELIVER_TO_LOCATION_ID, value); value = getHeaderFieldValue(message, OnBehalfOfCompID.FIELD); map.put(FIXConstants.ON_BEHALF_OF_COMP_ID, value); value = getHeaderFieldValue(message, OnBehalfOfSubID.FIELD); map.put(FIXConstants.ON_BEHALF_OF_SUB_ID, value); value = getHeaderFieldValue(message, OnBehalfOfLocationID.FIELD); map.put(FIXConstants.ON_BEHALF_OF_LOCATION_ID, value); return map; } private static String getHeaderFieldValue(Message message, int tag) { try { return message.getHeader().getString(tag); } catch (FieldNotFound fieldNotFound) { return null; } } /** * Extracts the name of the service which processed the message from the MessageContext * * @param msgCtx Axis2 MessageContext of a message * @return name of the AxisService * @throws org.apache.axis2.AxisFault on error */ public static String getServiceName(MessageContext msgCtx) throws AxisFault { Object serviceParam = msgCtx.getProperty(FIXConstants.FIX_SERVICE_NAME); if 
(serviceParam != null) { String serviceName = serviceParam.toString(); if (serviceName != null && !serviceName.equals("")) { return serviceName; } } Map trpHeaders = (Map) msgCtx.getProperty(MessageContext.TRANSPORT_HEADERS); //try to get the service from the transport headers if (trpHeaders != null) { String serviceName = (String) trpHeaders.get(FIXConstants.FIX_MESSAGE_SERVICE); if (serviceName != null) { return serviceName; } } throw new AxisFault("Unable to find a valid service for the message"); } /** * Extracts the application type for the message from the message context * * @param msgCtx Axis2 Message Context * @return application type of the message */ public static String getFixApplication(MessageContext msgCtx) { Map trpHeaders = (Map) msgCtx.getProperty(MessageContext.TRANSPORT_HEADERS); //try to get the application type from the transport headers String fixApplication = null; if (trpHeaders != null) { fixApplication = (String) trpHeaders.get(FIXConstants.FIX_MESSAGE_APPLICATION); } return fixApplication; } /** * Creates a Map of transport headers for a message * * @param serviceName name of the service to which the message belongs to * @param fixApplication FIX application type * @return a Map of transport headers */ public static Map<String, String> getTransportHeaders(String serviceName, String fixApplication) { Map<String, String> trpHeaders = new HashMap<String, String>(); trpHeaders.put(FIXConstants.FIX_MESSAGE_SERVICE, serviceName); trpHeaders.put(FIXConstants.FIX_MESSAGE_APPLICATION, fixApplication); return trpHeaders; } /** * Reads a FIX EPR and returns the host and port on a String array * * @param fixEPR a FIX EPR * @return an array of Strings containing addressing elements * @throws AxisFault on error */ public static String[] getSocketAddressElements(String fixEPR) throws AxisFault { int propPos = fixEPR.indexOf("?"); if (propPos != -1 && fixEPR.startsWith(FIXConstants.FIX_PREFIX)) { String address = 
fixEPR.substring(FIXConstants.FIX_PREFIX.length(), propPos); String[] socketAddressElemets = address.split(":"); if (socketAddressElemets.length == 2) { return socketAddressElemets; } } throw new AxisFault("Malformed FIX EPR: " + fixEPR); } /** * Reads the SOAP body of a message and attempts to retreive the application level * sequence number * * @param msgCtx Axis2 MessageContext * @return application level sequence number or -1 */ public static int getSequenceNumber(MessageContext msgCtx) { int seqNum; SOAPBody body = msgCtx.getEnvelope().getBody(); OMNamespace ns = getNamespaceOfFIXPayload(body); if (ns == null) { OMElement messageNode = body.getFirstChildWithName(new QName(FIXConstants.FIX_MESSAGE)); String value = messageNode.getAttributeValue(new QName(FIXConstants.FIX_MESSAGE_COUNTER)); if (value != null) { seqNum = Integer.parseInt(value); } else { seqNum = -1; } } else { seqNum = getSequenceNumber(body, ns); } return seqNum; } /** * Reads the SOAP body of a message and attempts to retreive the application level * sequence number * * @param body Body of the SOAP message * @param ns Namespace * @return application level sequence number or -1 */ private static int getSequenceNumber(SOAPBody body, OMNamespace ns) { OMElement messageNode = body.getFirstChildWithName(new QName(ns.getNamespaceURI(), FIXConstants.FIX_MESSAGE, ns.getPrefix())); String value = messageNode.getAttributeValue(new QName(ns.getNamespaceURI(), FIXConstants.FIX_MESSAGE_COUNTER, ns.getPrefix())); if (value != null) { return Integer.parseInt(value); } else { return -1; } } /** * Reads the SOAP body of a message and attempts to retreive the session identifier string * * @param msgCtx Axis2 MessageContext * @return a String uniquely identifying a session or null */ public static String getSourceSession(MessageContext msgCtx) { String srcSession; SOAPBody body = msgCtx.getEnvelope().getBody(); OMNamespace ns = getNamespaceOfFIXPayload(body); if (ns == null) { OMElement messageNode = 
body.getFirstChildWithName(new QName(FIXConstants.FIX_MESSAGE)); srcSession = messageNode.getAttributeValue(new QName( FIXConstants.FIX_MESSAGE_INCOMING_SESSION)); } else { srcSession = getSourceSession(body, ns); } return srcSession; } /** * Reads the SOAP body of a message and attempts to retrieve the session identifier string * with a namesapce * * @param body Body of the SOAP message * @param ns Namespace * @return a String uniquely identifying a session or null */ private static String getSourceSession(SOAPBody body, OMNamespace ns) { OMElement messageNode = body.getFirstChildWithName(new QName(ns.getNamespaceURI(), FIXConstants.FIX_MESSAGE, ns.getPrefix())); return messageNode.getAttributeValue(new QName(ns.getNamespaceURI(), FIXConstants.FIX_MESSAGE_INCOMING_SESSION, ns.getPrefix())); } /** * Creates a text node within a CDATA section selectively by looking at the enclosing text. * * @param soapFactory SOAPFactory instance used to create the OMText object * @param field Parent OMElement (field element) * @param text String text to be added to the field element */ private static void createOMText(SOAPFactory soapFactory, OMElement field, String text) { if (text == null) { return; } if (text.indexOf('<') == -1 && text.indexOf('&') == -1 && text.indexOf('>') == -1) { soapFactory.createOMText(field, text); } else { soapFactory.createOMText(field, text, OMElement.CDATA_SECTION_NODE); } } /** * Read the FIX message payload and identify the namespace if exists * * @param fixBody FIX message payload * @return namespace as a OMNamespace */ public static OMNamespace getNamespaceOfFIXPayload(SOAPBody fixBody){ return fixBody.getFirstElementNS(); } }
googleapis/google-cloud-java
35,138
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Presets.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/explanation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Preset configuration for example-based explanations * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.Presets} */ public final class Presets extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.Presets) PresetsOrBuilder { private static final long serialVersionUID = 0L; // Use Presets.newBuilder() to construct. 
private Presets(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Presets() { query_ = 0; modality_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Presets(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_Presets_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_Presets_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.Presets.class, com.google.cloud.aiplatform.v1beta1.Presets.Builder.class); } /** * * * <pre> * Preset option controlling parameters for query speed-precision trade-off * </pre> * * Protobuf enum {@code google.cloud.aiplatform.v1beta1.Presets.Query} */ public enum Query implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * More precise neighbors as a trade-off against slower response. * </pre> * * <code>PRECISE = 0;</code> */ PRECISE(0), /** * * * <pre> * Faster response as a trade-off against less precise neighbors. * </pre> * * <code>FAST = 1;</code> */ FAST(1), UNRECOGNIZED(-1), ; /** * * * <pre> * More precise neighbors as a trade-off against slower response. * </pre> * * <code>PRECISE = 0;</code> */ public static final int PRECISE_VALUE = 0; /** * * * <pre> * Faster response as a trade-off against less precise neighbors. 
* </pre> * * <code>FAST = 1;</code> */ public static final int FAST_VALUE = 1; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Query valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static Query forNumber(int value) { switch (value) { case 0: return PRECISE; case 1: return FAST; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Query> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Query> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Query>() { public Query findValueByNumber(int number) { return Query.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.Presets.getDescriptor().getEnumTypes().get(0); } private static final Query[] VALUES = values(); public static Query valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if 
(desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Query(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.Presets.Query) } /** * * * <pre> * Preset option controlling parameters for different modalities * </pre> * * Protobuf enum {@code google.cloud.aiplatform.v1beta1.Presets.Modality} */ public enum Modality implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Should not be set. Added as a recommended best practice for enums * </pre> * * <code>MODALITY_UNSPECIFIED = 0;</code> */ MODALITY_UNSPECIFIED(0), /** * * * <pre> * IMAGE modality * </pre> * * <code>IMAGE = 1;</code> */ IMAGE(1), /** * * * <pre> * TEXT modality * </pre> * * <code>TEXT = 2;</code> */ TEXT(2), /** * * * <pre> * TABULAR modality * </pre> * * <code>TABULAR = 3;</code> */ TABULAR(3), UNRECOGNIZED(-1), ; /** * * * <pre> * Should not be set. Added as a recommended best practice for enums * </pre> * * <code>MODALITY_UNSPECIFIED = 0;</code> */ public static final int MODALITY_UNSPECIFIED_VALUE = 0; /** * * * <pre> * IMAGE modality * </pre> * * <code>IMAGE = 1;</code> */ public static final int IMAGE_VALUE = 1; /** * * * <pre> * TEXT modality * </pre> * * <code>TEXT = 2;</code> */ public static final int TEXT_VALUE = 2; /** * * * <pre> * TABULAR modality * </pre> * * <code>TABULAR = 3;</code> */ public static final int TABULAR_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Modality valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. 
* @return The enum associated with the given numeric wire value. */ public static Modality forNumber(int value) { switch (value) { case 0: return MODALITY_UNSPECIFIED; case 1: return IMAGE; case 2: return TEXT; case 3: return TABULAR; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Modality> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Modality> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Modality>() { public Modality findValueByNumber(int number) { return Modality.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.Presets.getDescriptor().getEnumTypes().get(1); } private static final Modality[] VALUES = values(); public static Modality valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Modality(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.Presets.Modality) } private int bitField0_; public static final int QUERY_FIELD_NUMBER = 1; private int query_ = 0; /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. 
* </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @return Whether the query field is set. */ @java.lang.Override public boolean hasQuery() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. * </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @return The enum numeric value on the wire for query. */ @java.lang.Override public int getQueryValue() { return query_; } /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. * </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @return The query. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Presets.Query getQuery() { com.google.cloud.aiplatform.v1beta1.Presets.Query result = com.google.cloud.aiplatform.v1beta1.Presets.Query.forNumber(query_); return result == null ? com.google.cloud.aiplatform.v1beta1.Presets.Query.UNRECOGNIZED : result; } public static final int MODALITY_FIELD_NUMBER = 2; private int modality_ = 0; /** * * * <pre> * The modality of the uploaded model, which automatically configures the * distance measurement and feature normalization for the underlying example * index and queries. If your model does not precisely fit one of these types, * it is okay to choose the closest type. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.Presets.Modality modality = 2;</code> * * @return The enum numeric value on the wire for modality. */ @java.lang.Override public int getModalityValue() { return modality_; } /** * * * <pre> * The modality of the uploaded model, which automatically configures the * distance measurement and feature normalization for the underlying example * index and queries. 
If your model does not precisely fit one of these types, * it is okay to choose the closest type. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.Presets.Modality modality = 2;</code> * * @return The modality. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Presets.Modality getModality() { com.google.cloud.aiplatform.v1beta1.Presets.Modality result = com.google.cloud.aiplatform.v1beta1.Presets.Modality.forNumber(modality_); return result == null ? com.google.cloud.aiplatform.v1beta1.Presets.Modality.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, query_); } if (modality_ != com.google.cloud.aiplatform.v1beta1.Presets.Modality.MODALITY_UNSPECIFIED.getNumber()) { output.writeEnum(2, modality_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, query_); } if (modality_ != com.google.cloud.aiplatform.v1beta1.Presets.Modality.MODALITY_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, modality_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.Presets)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.Presets other = (com.google.cloud.aiplatform.v1beta1.Presets) obj; if (hasQuery() 
!= other.hasQuery()) return false; if (hasQuery()) { if (query_ != other.query_) return false; } if (modality_ != other.modality_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQuery()) { hash = (37 * hash) + QUERY_FIELD_NUMBER; hash = (53 * hash) + query_; } hash = (37 * hash) + MODALITY_FIELD_NUMBER; hash = (53 * hash) + modality_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.Presets parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.Presets parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.Presets parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.Presets prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Preset configuration for example-based explanations * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.Presets} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.Presets) com.google.cloud.aiplatform.v1beta1.PresetsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_Presets_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_Presets_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.Presets.class, com.google.cloud.aiplatform.v1beta1.Presets.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.Presets.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; query_ = 0; modality_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.ExplanationProto .internal_static_google_cloud_aiplatform_v1beta1_Presets_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Presets getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.Presets.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.aiplatform.v1beta1.Presets build() { com.google.cloud.aiplatform.v1beta1.Presets result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Presets buildPartial() { com.google.cloud.aiplatform.v1beta1.Presets result = new com.google.cloud.aiplatform.v1beta1.Presets(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.Presets result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.query_ = query_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.modality_ = modality_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.Presets) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.Presets) other); } else { super.mergeFrom(other); return 
this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Presets other) { if (other == com.google.cloud.aiplatform.v1beta1.Presets.getDefaultInstance()) return this; if (other.hasQuery()) { setQuery(other.getQuery()); } if (other.modality_ != 0) { setModalityValue(other.getModalityValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { query_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { modality_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int query_ = 0; /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. * </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @return Whether the query field is set. */ @java.lang.Override public boolean hasQuery() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. 
* </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @return The enum numeric value on the wire for query. */ @java.lang.Override public int getQueryValue() { return query_; } /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. * </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @param value The enum numeric value on the wire for query to set. * @return This builder for chaining. */ public Builder setQueryValue(int value) { query_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. * </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @return The query. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Presets.Query getQuery() { com.google.cloud.aiplatform.v1beta1.Presets.Query result = com.google.cloud.aiplatform.v1beta1.Presets.Query.forNumber(query_); return result == null ? com.google.cloud.aiplatform.v1beta1.Presets.Query.UNRECOGNIZED : result; } /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. * </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @param value The query to set. * @return This builder for chaining. */ public Builder setQuery(com.google.cloud.aiplatform.v1beta1.Presets.Query value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; query_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Preset option controlling parameters for speed-precision trade-off when * querying for examples. If omitted, defaults to `PRECISE`. 
* </pre> * * <code>optional .google.cloud.aiplatform.v1beta1.Presets.Query query = 1;</code> * * @return This builder for chaining. */ public Builder clearQuery() { bitField0_ = (bitField0_ & ~0x00000001); query_ = 0; onChanged(); return this; } private int modality_ = 0; /** * * * <pre> * The modality of the uploaded model, which automatically configures the * distance measurement and feature normalization for the underlying example * index and queries. If your model does not precisely fit one of these types, * it is okay to choose the closest type. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.Presets.Modality modality = 2;</code> * * @return The enum numeric value on the wire for modality. */ @java.lang.Override public int getModalityValue() { return modality_; } /** * * * <pre> * The modality of the uploaded model, which automatically configures the * distance measurement and feature normalization for the underlying example * index and queries. If your model does not precisely fit one of these types, * it is okay to choose the closest type. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.Presets.Modality modality = 2;</code> * * @param value The enum numeric value on the wire for modality to set. * @return This builder for chaining. */ public Builder setModalityValue(int value) { modality_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The modality of the uploaded model, which automatically configures the * distance measurement and feature normalization for the underlying example * index and queries. If your model does not precisely fit one of these types, * it is okay to choose the closest type. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.Presets.Modality modality = 2;</code> * * @return The modality. 
*/ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Presets.Modality getModality() { com.google.cloud.aiplatform.v1beta1.Presets.Modality result = com.google.cloud.aiplatform.v1beta1.Presets.Modality.forNumber(modality_); return result == null ? com.google.cloud.aiplatform.v1beta1.Presets.Modality.UNRECOGNIZED : result; } /** * * * <pre> * The modality of the uploaded model, which automatically configures the * distance measurement and feature normalization for the underlying example * index and queries. If your model does not precisely fit one of these types, * it is okay to choose the closest type. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.Presets.Modality modality = 2;</code> * * @param value The modality to set. * @return This builder for chaining. */ public Builder setModality(com.google.cloud.aiplatform.v1beta1.Presets.Modality value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; modality_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The modality of the uploaded model, which automatically configures the * distance measurement and feature normalization for the underlying example * index and queries. If your model does not precisely fit one of these types, * it is okay to choose the closest type. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.Presets.Modality modality = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearModality() { bitField0_ = (bitField0_ & ~0x00000002); modality_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.Presets) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.Presets) private static final com.google.cloud.aiplatform.v1beta1.Presets DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.Presets(); } public static com.google.cloud.aiplatform.v1beta1.Presets getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Presets> PARSER = new com.google.protobuf.AbstractParser<Presets>() { @java.lang.Override public Presets parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Presets> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Presets> getParserForType() { return PARSER; } @java.lang.Override public 
com.google.cloud.aiplatform.v1beta1.Presets getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ---------------------------------------------------------------------------
// NOTE(review): the three lines below are dataset-concatenation residue, not
// Java source — they are the metadata row (repo id, size, file path) of a
// SECOND generated file that begins immediately after this comment. Kept as a
// comment so the content is preserved while remaining syntactically inert.
//   repo: googleapis/google-cloud-java
//   size: 35,154
//   path: java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/UpdateEntryTypeRequest.java
// ---------------------------------------------------------------------------
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/catalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * Update EntryType Request. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateEntryTypeRequest} */ public final class UpdateEntryTypeRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.UpdateEntryTypeRequest) UpdateEntryTypeRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateEntryTypeRequest.newBuilder() to construct. 
private UpdateEntryTypeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateEntryTypeRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateEntryTypeRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateEntryTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateEntryTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateEntryTypeRequest.class, com.google.cloud.dataplex.v1.UpdateEntryTypeRequest.Builder.class); } private int bitField0_; public static final int ENTRY_TYPE_FIELD_NUMBER = 1; private com.google.cloud.dataplex.v1.EntryType entryType_; /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entryType field is set. */ @java.lang.Override public boolean hasEntryType() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entryType. */ @java.lang.Override public com.google.cloud.dataplex.v1.EntryType getEntryType() { return entryType_ == null ? com.google.cloud.dataplex.v1.EntryType.getDefaultInstance() : entryType_; } /** * * * <pre> * Required. EntryType Resource. 
* </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dataplex.v1.EntryTypeOrBuilder getEntryTypeOrBuilder() { return entryType_ == null ? com.google.cloud.dataplex.v1.EntryType.getDefaultInstance() : entryType_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. The service validates the request without performing any * mutations. The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getEntryType()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEntryType()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.UpdateEntryTypeRequest)) { return super.equals(obj); } com.google.cloud.dataplex.v1.UpdateEntryTypeRequest other = (com.google.cloud.dataplex.v1.UpdateEntryTypeRequest) obj; if (hasEntryType() != other.hasEntryType()) return false; if (hasEntryType()) { if (!getEntryType().equals(other.getEntryType())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEntryType()) { hash = (37 * hash) + ENTRY_TYPE_FIELD_NUMBER; hash = (53 * hash) + getEntryType().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataplex.v1.UpdateEntryTypeRequest 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Update EntryType Request. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateEntryTypeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.UpdateEntryTypeRequest) com.google.cloud.dataplex.v1.UpdateEntryTypeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateEntryTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateEntryTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateEntryTypeRequest.class, com.google.cloud.dataplex.v1.UpdateEntryTypeRequest.Builder.class); } // Construct using com.google.cloud.dataplex.v1.UpdateEntryTypeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEntryTypeFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; entryType_ = null; if (entryTypeBuilder_ != null) { 
entryTypeBuilder_.dispose(); entryTypeBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateEntryTypeRequest_descriptor; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEntryTypeRequest getDefaultInstanceForType() { return com.google.cloud.dataplex.v1.UpdateEntryTypeRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEntryTypeRequest build() { com.google.cloud.dataplex.v1.UpdateEntryTypeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEntryTypeRequest buildPartial() { com.google.cloud.dataplex.v1.UpdateEntryTypeRequest result = new com.google.cloud.dataplex.v1.UpdateEntryTypeRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataplex.v1.UpdateEntryTypeRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.entryType_ = entryTypeBuilder_ == null ? entryType_ : entryTypeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataplex.v1.UpdateEntryTypeRequest) { return mergeFrom((com.google.cloud.dataplex.v1.UpdateEntryTypeRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataplex.v1.UpdateEntryTypeRequest other) { if (other == com.google.cloud.dataplex.v1.UpdateEntryTypeRequest.getDefaultInstance()) return this; if (other.hasEntryType()) { mergeEntryType(other.getEntryType()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getEntryTypeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.dataplex.v1.EntryType entryType_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.EntryType, com.google.cloud.dataplex.v1.EntryType.Builder, com.google.cloud.dataplex.v1.EntryTypeOrBuilder> entryTypeBuilder_; /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entryType field is set. */ public boolean hasEntryType() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entryType. */ public com.google.cloud.dataplex.v1.EntryType getEntryType() { if (entryTypeBuilder_ == null) { return entryType_ == null ? 
com.google.cloud.dataplex.v1.EntryType.getDefaultInstance() : entryType_; } else { return entryTypeBuilder_.getMessage(); } } /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntryType(com.google.cloud.dataplex.v1.EntryType value) { if (entryTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entryType_ = value; } else { entryTypeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntryType(com.google.cloud.dataplex.v1.EntryType.Builder builderForValue) { if (entryTypeBuilder_ == null) { entryType_ = builderForValue.build(); } else { entryTypeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeEntryType(com.google.cloud.dataplex.v1.EntryType value) { if (entryTypeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && entryType_ != null && entryType_ != com.google.cloud.dataplex.v1.EntryType.getDefaultInstance()) { getEntryTypeBuilder().mergeFrom(value); } else { entryType_ = value; } } else { entryTypeBuilder_.mergeFrom(value); } if (entryType_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. EntryType Resource. 
* </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearEntryType() { bitField0_ = (bitField0_ & ~0x00000001); entryType_ = null; if (entryTypeBuilder_ != null) { entryTypeBuilder_.dispose(); entryTypeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.EntryType.Builder getEntryTypeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getEntryTypeFieldBuilder().getBuilder(); } /** * * * <pre> * Required. EntryType Resource. * </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.EntryTypeOrBuilder getEntryTypeOrBuilder() { if (entryTypeBuilder_ != null) { return entryTypeBuilder_.getMessageOrBuilder(); } else { return entryType_ == null ? com.google.cloud.dataplex.v1.EntryType.getDefaultInstance() : entryType_; } } /** * * * <pre> * Required. EntryType Resource. 
* </pre> * * <code> * .google.cloud.dataplex.v1.EntryType entry_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.EntryType, com.google.cloud.dataplex.v1.EntryType.Builder, com.google.cloud.dataplex.v1.EntryTypeOrBuilder> getEntryTypeFieldBuilder() { if (entryTypeBuilder_ == null) { entryTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.EntryType, com.google.cloud.dataplex.v1.EntryType.Builder, com.google.cloud.dataplex.v1.EntryTypeOrBuilder>( getEntryType(), getParentForChildren(), isClean()); entryType_ = null; } return entryTypeBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private boolean validateOnly_; /** * * * <pre> * Optional. The service validates the request without performing any * mutations. The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. The service validates the request without performing any * mutations. The default is false. 
* </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The service validates the request without performing any * mutations. The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.UpdateEntryTypeRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.UpdateEntryTypeRequest) private static final com.google.cloud.dataplex.v1.UpdateEntryTypeRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.UpdateEntryTypeRequest(); } public static com.google.cloud.dataplex.v1.UpdateEntryTypeRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateEntryTypeRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateEntryTypeRequest>() { @java.lang.Override public UpdateEntryTypeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateEntryTypeRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateEntryTypeRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEntryTypeRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
35,304
jdk/src/share/classes/java/util/concurrent/LinkedBlockingQueue.java
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. 
* However, the following notice accompanied the original version of this * file: * * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at * http://creativecommons.org/publicdomain/zero/1.0/ */ package java.util.concurrent; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import java.util.AbstractQueue; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Spliterator; import java.util.Spliterators; import java.util.function.Consumer; /** * An optionally-bounded {@linkplain BlockingQueue blocking queue} based on * linked nodes. * This queue orders elements FIFO (first-in-first-out). * The <em>head</em> of the queue is that element that has been on the * queue the longest time. * The <em>tail</em> of the queue is that element that has been on the * queue the shortest time. New elements * are inserted at the tail of the queue, and the queue retrieval * operations obtain elements at the head of the queue. * Linked queues typically have higher throughput than array-based queues but * less predictable performance in most concurrent applications. * * <p>The optional capacity bound constructor argument serves as a * way to prevent excessive queue expansion. The capacity, if unspecified, * is equal to {@link Integer#MAX_VALUE}. Linked nodes are * dynamically created upon each insertion unless this would bring the * queue above capacity. * * <p>This class and its iterator implement all of the * <em>optional</em> methods of the {@link Collection} and {@link * Iterator} interfaces. * * <p>This class is a member of the * <a href="{@docRoot}/../technotes/guides/collections/index.html"> * Java Collections Framework</a>. 
* * @since 1.5 * @author Doug Lea * @param <E> the type of elements held in this collection */ public class LinkedBlockingQueue<E> extends AbstractQueue<E> implements BlockingQueue<E>, java.io.Serializable { private static final long serialVersionUID = -6903933977591709194L; /* * A variant of the "two lock queue" algorithm. The putLock gates * entry to put (and offer), and has an associated condition for * waiting puts. Similarly for the takeLock. The "count" field * that they both rely on is maintained as an atomic to avoid * needing to get both locks in most cases. Also, to minimize need * for puts to get takeLock and vice-versa, cascading notifies are * used. When a put notices that it has enabled at least one take, * it signals taker. That taker in turn signals others if more * items have been entered since the signal. And symmetrically for * takes signalling puts. Operations such as remove(Object) and * iterators acquire both locks. * * Visibility between writers and readers is provided as follows: * * Whenever an element is enqueued, the putLock is acquired and * count updated. A subsequent reader guarantees visibility to the * enqueued Node by either acquiring the putLock (via fullyLock) * or by acquiring the takeLock, and then reading n = count.get(); * this gives visibility to the first n items. * * To implement weakly consistent iterators, it appears we need to * keep all Nodes GC-reachable from a predecessor dequeued Node. * That would cause two problems: * - allow a rogue Iterator to cause unbounded memory retention * - cause cross-generational linking of old Nodes to new Nodes if * a Node was tenured while live, which generational GCs have a * hard time dealing with, causing repeated major collections. * However, only non-deleted Nodes need to be reachable from * dequeued Nodes, and reachability does not necessarily have to * be of the kind understood by the GC. We use the trick of * linking a Node that has just been dequeued to itself. 
Such a * self-link implicitly means to advance to head.next. */ /** * Linked list node class */ static class Node<E> { E item; /** * One of: * - the real successor Node * - this Node, meaning the successor is head.next * - null, meaning there is no successor (this is the last node) */ Node<E> next; Node(E x) { item = x; } } /** The capacity bound, or Integer.MAX_VALUE if none */ private final int capacity; /** Current number of elements */ private final AtomicInteger count = new AtomicInteger(); /** * Head of linked list. * Invariant: head.item == null */ transient Node<E> head; /** * Tail of linked list. * Invariant: last.next == null */ private transient Node<E> last; /** Lock held by take, poll, etc */ private final ReentrantLock takeLock = new ReentrantLock(); /** Wait queue for waiting takes */ private final Condition notEmpty = takeLock.newCondition(); /** Lock held by put, offer, etc */ private final ReentrantLock putLock = new ReentrantLock(); /** Wait queue for waiting puts */ private final Condition notFull = putLock.newCondition(); /** * Signals a waiting take. Called only from put/offer (which do not * otherwise ordinarily lock takeLock.) */ private void signalNotEmpty() { final ReentrantLock takeLock = this.takeLock; takeLock.lock(); try { notEmpty.signal(); } finally { takeLock.unlock(); } } /** * Signals a waiting put. Called only from take/poll. */ private void signalNotFull() { final ReentrantLock putLock = this.putLock; putLock.lock(); try { notFull.signal(); } finally { putLock.unlock(); } } /** * Links node at end of queue. * * @param node the node */ private void enqueue(Node<E> node) { // assert putLock.isHeldByCurrentThread(); // assert last.next == null; last = last.next = node; } /** * Removes a node from head of queue. 
* * @return the node */ private E dequeue() { // assert takeLock.isHeldByCurrentThread(); // assert head.item == null; Node<E> h = head; Node<E> first = h.next; h.next = h; // help GC head = first; E x = first.item; first.item = null; return x; } /** * Locks to prevent both puts and takes. */ void fullyLock() { putLock.lock(); takeLock.lock(); } /** * Unlocks to allow both puts and takes. */ void fullyUnlock() { takeLock.unlock(); putLock.unlock(); } // /** // * Tells whether both locks are held by current thread. // */ // boolean isFullyLocked() { // return (putLock.isHeldByCurrentThread() && // takeLock.isHeldByCurrentThread()); // } /** * Creates a {@code LinkedBlockingQueue} with a capacity of * {@link Integer#MAX_VALUE}. */ public LinkedBlockingQueue() { this(Integer.MAX_VALUE); } /** * Creates a {@code LinkedBlockingQueue} with the given (fixed) capacity. * * @param capacity the capacity of this queue * @throws IllegalArgumentException if {@code capacity} is not greater * than zero */ public LinkedBlockingQueue(int capacity) { if (capacity <= 0) throw new IllegalArgumentException(); this.capacity = capacity; last = head = new Node<E>(null); } /** * Creates a {@code LinkedBlockingQueue} with a capacity of * {@link Integer#MAX_VALUE}, initially containing the elements of the * given collection, * added in traversal order of the collection's iterator. * * @param c the collection of elements to initially contain * @throws NullPointerException if the specified collection or any * of its elements are null */ public LinkedBlockingQueue(Collection<? 
extends E> c) { this(Integer.MAX_VALUE); final ReentrantLock putLock = this.putLock; putLock.lock(); // Never contended, but necessary for visibility try { int n = 0; for (E e : c) { if (e == null) throw new NullPointerException(); if (n == capacity) throw new IllegalStateException("Queue full"); enqueue(new Node<E>(e)); ++n; } count.set(n); } finally { putLock.unlock(); } } // this doc comment is overridden to remove the reference to collections // greater in size than Integer.MAX_VALUE /** * Returns the number of elements in this queue. * * @return the number of elements in this queue */ public int size() { return count.get(); } // this doc comment is a modified copy of the inherited doc comment, // without the reference to unlimited queues. /** * Returns the number of additional elements that this queue can ideally * (in the absence of memory or resource constraints) accept without * blocking. This is always equal to the initial capacity of this queue * less the current {@code size} of this queue. * * <p>Note that you <em>cannot</em> always tell if an attempt to insert * an element will succeed by inspecting {@code remainingCapacity} * because it may be the case that another thread is about to * insert or remove an element. */ public int remainingCapacity() { return capacity - count.get(); } /** * Inserts the specified element at the tail of this queue, waiting if * necessary for space to become available. * * @throws InterruptedException {@inheritDoc} * @throws NullPointerException {@inheritDoc} */ public void put(E e) throws InterruptedException { if (e == null) throw new NullPointerException(); // Note: convention in all put/take/etc is to preset local var // holding count negative to indicate failure unless set. int c = -1; Node<E> node = new Node<E>(e); final ReentrantLock putLock = this.putLock; final AtomicInteger count = this.count; putLock.lockInterruptibly(); try { /* * Note that count is used in wait guard even though it is * not protected by lock. 
This works because count can * only decrease at this point (all other puts are shut * out by lock), and we (or some other waiting put) are * signalled if it ever changes from capacity. Similarly * for all other uses of count in other wait guards. */ while (count.get() == capacity) { notFull.await(); } enqueue(node); c = count.getAndIncrement(); if (c + 1 < capacity) notFull.signal(); } finally { putLock.unlock(); } if (c == 0) signalNotEmpty(); } /** * Inserts the specified element at the tail of this queue, waiting if * necessary up to the specified wait time for space to become available. * * @return {@code true} if successful, or {@code false} if * the specified waiting time elapses before space is available * @throws InterruptedException {@inheritDoc} * @throws NullPointerException {@inheritDoc} */ public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException { if (e == null) throw new NullPointerException(); long nanos = unit.toNanos(timeout); int c = -1; final ReentrantLock putLock = this.putLock; final AtomicInteger count = this.count; putLock.lockInterruptibly(); try { while (count.get() == capacity) { if (nanos <= 0) return false; nanos = notFull.awaitNanos(nanos); } enqueue(new Node<E>(e)); c = count.getAndIncrement(); if (c + 1 < capacity) notFull.signal(); } finally { putLock.unlock(); } if (c == 0) signalNotEmpty(); return true; } /** * Inserts the specified element at the tail of this queue if it is * possible to do so immediately without exceeding the queue's capacity, * returning {@code true} upon success and {@code false} if this queue * is full. * When using a capacity-restricted queue, this method is generally * preferable to method {@link BlockingQueue#add add}, which can fail to * insert an element only by throwing an exception. 
* * @throws NullPointerException if the specified element is null */ public boolean offer(E e) { if (e == null) throw new NullPointerException(); final AtomicInteger count = this.count; if (count.get() == capacity) return false; int c = -1; Node<E> node = new Node<E>(e); final ReentrantLock putLock = this.putLock; putLock.lock(); try { if (count.get() < capacity) { enqueue(node); c = count.getAndIncrement(); if (c + 1 < capacity) notFull.signal(); } } finally { putLock.unlock(); } if (c == 0) signalNotEmpty(); return c >= 0; } public E take() throws InterruptedException { E x; int c = -1; final AtomicInteger count = this.count; final ReentrantLock takeLock = this.takeLock; takeLock.lockInterruptibly(); try { while (count.get() == 0) { notEmpty.await(); } x = dequeue(); c = count.getAndDecrement(); if (c > 1) notEmpty.signal(); } finally { takeLock.unlock(); } if (c == capacity) signalNotFull(); return x; } public E poll(long timeout, TimeUnit unit) throws InterruptedException { E x = null; int c = -1; long nanos = unit.toNanos(timeout); final AtomicInteger count = this.count; final ReentrantLock takeLock = this.takeLock; takeLock.lockInterruptibly(); try { while (count.get() == 0) { if (nanos <= 0) return null; nanos = notEmpty.awaitNanos(nanos); } x = dequeue(); c = count.getAndDecrement(); if (c > 1) notEmpty.signal(); } finally { takeLock.unlock(); } if (c == capacity) signalNotFull(); return x; } public E poll() { final AtomicInteger count = this.count; if (count.get() == 0) return null; E x = null; int c = -1; final ReentrantLock takeLock = this.takeLock; takeLock.lock(); try { if (count.get() > 0) { x = dequeue(); c = count.getAndDecrement(); if (c > 1) notEmpty.signal(); } } finally { takeLock.unlock(); } if (c == capacity) signalNotFull(); return x; } public E peek() { if (count.get() == 0) return null; final ReentrantLock takeLock = this.takeLock; takeLock.lock(); try { Node<E> first = head.next; if (first == null) return null; else return first.item; } 
finally { takeLock.unlock(); } } /** * Unlinks interior Node p with predecessor trail. */ void unlink(Node<E> p, Node<E> trail) { // assert isFullyLocked(); // p.next is not changed, to allow iterators that are // traversing p to maintain their weak-consistency guarantee. p.item = null; trail.next = p.next; if (last == p) last = trail; if (count.getAndDecrement() == capacity) notFull.signal(); } /** * Removes a single instance of the specified element from this queue, * if it is present. More formally, removes an element {@code e} such * that {@code o.equals(e)}, if this queue contains one or more such * elements. * Returns {@code true} if this queue contained the specified element * (or equivalently, if this queue changed as a result of the call). * * @param o element to be removed from this queue, if present * @return {@code true} if this queue changed as a result of the call */ public boolean remove(Object o) { if (o == null) return false; fullyLock(); try { for (Node<E> trail = head, p = trail.next; p != null; trail = p, p = p.next) { if (o.equals(p.item)) { unlink(p, trail); return true; } } return false; } finally { fullyUnlock(); } } /** * Returns {@code true} if this queue contains the specified element. * More formally, returns {@code true} if and only if this queue contains * at least one element {@code e} such that {@code o.equals(e)}. * * @param o object to be checked for containment in this queue * @return {@code true} if this queue contains the specified element */ public boolean contains(Object o) { if (o == null) return false; fullyLock(); try { for (Node<E> p = head.next; p != null; p = p.next) if (o.equals(p.item)) return true; return false; } finally { fullyUnlock(); } } /** * Returns an array containing all of the elements in this queue, in * proper sequence. * * <p>The returned array will be "safe" in that no references to it are * maintained by this queue. (In other words, this method must allocate * a new array). 
The caller is thus free to modify the returned array. * * <p>This method acts as bridge between array-based and collection-based * APIs. * * @return an array containing all of the elements in this queue */ public Object[] toArray() { fullyLock(); try { int size = count.get(); Object[] a = new Object[size]; int k = 0; for (Node<E> p = head.next; p != null; p = p.next) a[k++] = p.item; return a; } finally { fullyUnlock(); } } /** * Returns an array containing all of the elements in this queue, in * proper sequence; the runtime type of the returned array is that of * the specified array. If the queue fits in the specified array, it * is returned therein. Otherwise, a new array is allocated with the * runtime type of the specified array and the size of this queue. * * <p>If this queue fits in the specified array with room to spare * (i.e., the array has more elements than this queue), the element in * the array immediately following the end of the queue is set to * {@code null}. * * <p>Like the {@link #toArray()} method, this method acts as bridge between * array-based and collection-based APIs. Further, this method allows * precise control over the runtime type of the output array, and may, * under certain circumstances, be used to save allocation costs. * * <p>Suppose {@code x} is a queue known to contain only strings. * The following code can be used to dump the queue into a newly * allocated array of {@code String}: * * <pre> {@code String[] y = x.toArray(new String[0]);}</pre> * * Note that {@code toArray(new Object[0])} is identical in function to * {@code toArray()}. 
* * @param a the array into which the elements of the queue are to * be stored, if it is big enough; otherwise, a new array of the * same runtime type is allocated for this purpose * @return an array containing all of the elements in this queue * @throws ArrayStoreException if the runtime type of the specified array * is not a supertype of the runtime type of every element in * this queue * @throws NullPointerException if the specified array is null */ @SuppressWarnings("unchecked") public <T> T[] toArray(T[] a) { fullyLock(); try { int size = count.get(); if (a.length < size) a = (T[])java.lang.reflect.Array.newInstance (a.getClass().getComponentType(), size); int k = 0; for (Node<E> p = head.next; p != null; p = p.next) a[k++] = (T)p.item; if (a.length > k) a[k] = null; return a; } finally { fullyUnlock(); } } public String toString() { fullyLock(); try { Node<E> p = head.next; if (p == null) return "[]"; StringBuilder sb = new StringBuilder(); sb.append('['); for (;;) { E e = p.item; sb.append(e == this ? "(this Collection)" : e); p = p.next; if (p == null) return sb.append(']').toString(); sb.append(',').append(' '); } } finally { fullyUnlock(); } } /** * Atomically removes all of the elements from this queue. * The queue will be empty after this call returns. */ public void clear() { fullyLock(); try { for (Node<E> p, h = head; (p = h.next) != null; h = p) { h.next = h; p.item = null; } head = last; // assert head.item == null && head.next == null; if (count.getAndSet(0) == capacity) notFull.signal(); } finally { fullyUnlock(); } } /** * @throws UnsupportedOperationException {@inheritDoc} * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} * @throws IllegalArgumentException {@inheritDoc} */ public int drainTo(Collection<? 
super E> c) { return drainTo(c, Integer.MAX_VALUE); } /** * @throws UnsupportedOperationException {@inheritDoc} * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} * @throws IllegalArgumentException {@inheritDoc} */ public int drainTo(Collection<? super E> c, int maxElements) { if (c == null) throw new NullPointerException(); if (c == this) throw new IllegalArgumentException(); if (maxElements <= 0) return 0; boolean signalNotFull = false; final ReentrantLock takeLock = this.takeLock; takeLock.lock(); try { int n = Math.min(maxElements, count.get()); // count.get provides visibility to first n Nodes Node<E> h = head; int i = 0; try { while (i < n) { Node<E> p = h.next; c.add(p.item); p.item = null; h.next = h; h = p; ++i; } return n; } finally { // Restore invariants even if c.add() threw if (i > 0) { // assert h.item == null; head = h; signalNotFull = (count.getAndAdd(-i) == capacity); } } } finally { takeLock.unlock(); if (signalNotFull) signalNotFull(); } } /** * Returns an iterator over the elements in this queue in proper sequence. * The elements will be returned in order from first (head) to last (tail). * * <p>The returned iterator is * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>. * * @return an iterator over the elements in this queue in proper sequence */ public Iterator<E> iterator() { return new Itr(); } private class Itr implements Iterator<E> { /* * Basic weakly-consistent iterator. At all times hold the next * item to hand out so that if hasNext() reports true, we will * still have it to return even if lost race with a take etc. */ private Node<E> current; private Node<E> lastRet; private E currentElement; Itr() { fullyLock(); try { current = head.next; if (current != null) currentElement = current.item; } finally { fullyUnlock(); } } public boolean hasNext() { return current != null; } /** * Returns the next live successor of p, or null if no such. 
* * Unlike other traversal methods, iterators need to handle both: * - dequeued nodes (p.next == p) * - (possibly multiple) interior removed nodes (p.item == null) */ private Node<E> nextNode(Node<E> p) { for (;;) { Node<E> s = p.next; if (s == p) return head.next; if (s == null || s.item != null) return s; p = s; } } public E next() { fullyLock(); try { if (current == null) throw new NoSuchElementException(); E x = currentElement; lastRet = current; current = nextNode(current); currentElement = (current == null) ? null : current.item; return x; } finally { fullyUnlock(); } } public void remove() { if (lastRet == null) throw new IllegalStateException(); fullyLock(); try { Node<E> node = lastRet; lastRet = null; for (Node<E> trail = head, p = trail.next; p != null; trail = p, p = p.next) { if (p == node) { unlink(p, trail); break; } } } finally { fullyUnlock(); } } } /** A customized variant of Spliterators.IteratorSpliterator */ static final class LBQSpliterator<E> implements Spliterator<E> { static final int MAX_BATCH = 1 << 25; // max batch array size; final LinkedBlockingQueue<E> queue; Node<E> current; // current node; null until initialized int batch; // batch size for splits boolean exhausted; // true when no more nodes long est; // size estimate LBQSpliterator(LinkedBlockingQueue<E> queue) { this.queue = queue; this.est = queue.size(); } public long estimateSize() { return est; } public Spliterator<E> trySplit() { Node<E> h; final LinkedBlockingQueue<E> q = this.queue; int b = batch; int n = (b <= 0) ? 1 : (b >= MAX_BATCH) ? 
MAX_BATCH : b + 1; if (!exhausted && ((h = current) != null || (h = q.head.next) != null) && h.next != null) { Object[] a = new Object[n]; int i = 0; Node<E> p = current; q.fullyLock(); try { if (p != null || (p = q.head.next) != null) { do { if ((a[i] = p.item) != null) ++i; } while ((p = p.next) != null && i < n); } } finally { q.fullyUnlock(); } if ((current = p) == null) { est = 0L; exhausted = true; } else if ((est -= i) < 0L) est = 0L; if (i > 0) { batch = i; return Spliterators.spliterator (a, 0, i, Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.CONCURRENT); } } return null; } public void forEachRemaining(Consumer<? super E> action) { if (action == null) throw new NullPointerException(); final LinkedBlockingQueue<E> q = this.queue; if (!exhausted) { exhausted = true; Node<E> p = current; do { E e = null; q.fullyLock(); try { if (p == null) p = q.head.next; while (p != null) { e = p.item; p = p.next; if (e != null) break; } } finally { q.fullyUnlock(); } if (e != null) action.accept(e); } while (p != null); } } public boolean tryAdvance(Consumer<? super E> action) { if (action == null) throw new NullPointerException(); final LinkedBlockingQueue<E> q = this.queue; if (!exhausted) { E e = null; q.fullyLock(); try { if (current == null) current = q.head.next; while (current != null) { e = current.item; current = current.next; if (e != null) break; } } finally { q.fullyUnlock(); } if (current == null) exhausted = true; if (e != null) { action.accept(e); return true; } } return false; } public int characteristics() { return Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.CONCURRENT; } } /** * Returns a {@link Spliterator} over the elements in this queue. * * <p>The returned spliterator is * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>. * * <p>The {@code Spliterator} reports {@link Spliterator#CONCURRENT}, * {@link Spliterator#ORDERED}, and {@link Spliterator#NONNULL}. 
* * @implNote * The {@code Spliterator} implements {@code trySplit} to permit limited * parallelism. * * @return a {@code Spliterator} over the elements in this queue * @since 1.8 */ public Spliterator<E> spliterator() { return new LBQSpliterator<E>(this); } /** * Saves this queue to a stream (that is, serializes it). * * @param s the stream * @throws java.io.IOException if an I/O error occurs * @serialData The capacity is emitted (int), followed by all of * its elements (each an {@code Object}) in the proper order, * followed by a null */ private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { fullyLock(); try { // Write out any hidden stuff, plus capacity s.defaultWriteObject(); // Write out all elements in the proper order. for (Node<E> p = head.next; p != null; p = p.next) s.writeObject(p.item); // Use trailing null as sentinel s.writeObject(null); } finally { fullyUnlock(); } } /** * Reconstitutes this queue from a stream (that is, deserializes it). * @param s the stream * @throws ClassNotFoundException if the class of a serialized object * could not be found * @throws java.io.IOException if an I/O error occurs */ private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { // Read in capacity, and any hidden stuff s.defaultReadObject(); count.set(0); last = head = new Node<E>(null); // Read in all elements and place in queue for (;;) { @SuppressWarnings("unchecked") E item = (E)s.readObject(); if (item == null) break; add(item); } } }
apache/spark-kubernetes-operator
35,606
spark-operator/src/test/java/org/apache/spark/k8s/operator/reconciler/reconcilesteps/AppCleanUpStepTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.spark.k8s.operator.reconciler.reconcilesteps; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import java.time.Duration; import java.time.Instant; import java.util.List; import java.util.Optional; import io.fabric8.kubernetes.api.model.ConfigMap; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.client.KubernetesClient; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.apache.spark.k8s.operator.Constants; import org.apache.spark.k8s.operator.SparkApplication; import org.apache.spark.k8s.operator.context.SparkAppContext; import org.apache.spark.k8s.operator.reconciler.ReconcileProgress; import 
org.apache.spark.k8s.operator.spec.ApplicationSpec; import org.apache.spark.k8s.operator.spec.ApplicationTolerations; import org.apache.spark.k8s.operator.spec.ResourceRetainPolicy; import org.apache.spark.k8s.operator.status.ApplicationState; import org.apache.spark.k8s.operator.status.ApplicationStateSummary; import org.apache.spark.k8s.operator.status.ApplicationStatus; import org.apache.spark.k8s.operator.utils.ReconcilerUtils; import org.apache.spark.k8s.operator.utils.SparkAppStatusRecorder; import org.apache.spark.k8s.operator.utils.SparkAppStatusUtils; @SuppressWarnings("PMD.NcssCount") class AppCleanUpStepTest { private final ApplicationSpec alwaysRetain = ApplicationSpec.builder() .applicationTolerations( ApplicationTolerations.builder() .resourceRetainPolicy(ResourceRetainPolicy.Always) .build()) .build(); private final ApplicationSpec neverRetain = ApplicationSpec.builder() .applicationTolerations( ApplicationTolerations.builder() .resourceRetainPolicy(ResourceRetainPolicy.Never) .build()) .build(); private final ApplicationSpec exceedRetainDuration = ApplicationSpec.builder() .applicationTolerations( ApplicationTolerations.builder() .resourceRetainPolicy(ResourceRetainPolicy.Always) .resourceRetainDurationMillis(1L) .build()) .build(); private final ApplicationSpec exceedRetainDurationFromTtl = ApplicationSpec.builder() .applicationTolerations( ApplicationTolerations.builder() .resourceRetainPolicy(ResourceRetainPolicy.Always) .ttlAfterStopMillis(1L) .build()) .build(); private final ApplicationSpec notExceedRetainDuration = ApplicationSpec.builder() .applicationTolerations( ApplicationTolerations.builder() .resourceRetainPolicy(ResourceRetainPolicy.Always) .resourceRetainDurationMillis(24 * 60 * 60 * 1000L) .build()) .build(); private final ApplicationSpec notExceedTtl = ApplicationSpec.builder() .applicationTolerations( ApplicationTolerations.builder() .resourceRetainPolicy(ResourceRetainPolicy.Always) .ttlAfterStopMillis(24 * 60 * 60 * 1000L) 
.build()) .build(); private final List<ApplicationSpec> specs = List.of( alwaysRetain, neverRetain, exceedRetainDuration, exceedRetainDurationFromTtl, notExceedRetainDuration, notExceedTtl); @Test void enableForceDelete() { AppCleanUpStep appCleanUpStep = new AppCleanUpStep(); SparkApplication app = new SparkApplication(); app.getStatus() .getCurrentState() .setLastTransitionTime(Instant.now().minusSeconds(5).toString()); app.getSpec() .getApplicationTolerations() .getApplicationTimeoutConfig() .setForceTerminationGracePeriodMillis(3000L); assertTrue(appCleanUpStep.enableForceDelete(app)); } @Test void routineCleanupForRunningAppExpectNoAction() { SparkAppStatusRecorder mockRecorder = mock(SparkAppStatusRecorder.class); AppCleanUpStep routineCheck = new AppCleanUpStep(); for (ApplicationStateSummary stateSummary : ApplicationStateSummary.values()) { if (!stateSummary.isStopping() && !stateSummary.isTerminated()) { ApplicationStatus status = prepareApplicationStatus(stateSummary); ApplicationSpec spec = ApplicationSpec.builder().build(); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(status); when(mockApp.getSpec()).thenReturn(spec); SparkAppContext mockAppContext = mock(SparkAppContext.class); when(mockAppContext.getResource()).thenReturn(mockApp); ReconcileProgress progress = routineCheck.reconcile(mockAppContext, mockRecorder); Assertions.assertEquals(ReconcileProgress.proceed(), progress); verify(mockAppContext).getResource(); verify(mockApp).getSpec(); verify(mockApp).getStatus(); verifyNoMoreInteractions(mockAppContext, mockRecorder, mockApp); } } } @Test void onDemandCleanupForRunningAppExpectDelete() { SparkAppStatusRecorder mockRecorder = mock(SparkAppStatusRecorder.class); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); for (ApplicationStateSummary stateSummary : ApplicationStateSummary.values()) { if (!stateSummary.isStopping() && !stateSummary.isTerminated()) { 
ApplicationStatus status = prepareApplicationStatus(stateSummary); ApplicationSpec spec = ApplicationSpec.builder().build(); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(status); when(mockApp.getSpec()).thenReturn(spec); SparkAppContext mockAppContext = mock(SparkAppContext.class); when(mockAppContext.getResource()).thenReturn(mockApp); KubernetesClient mockClient = mock(KubernetesClient.class); when(mockAppContext.getClient()).thenReturn(mockClient); Pod driverPod = mock(Pod.class); when(mockAppContext.getDriverPod()).thenReturn(Optional.of(driverPod)); when(mockAppContext.getDriverPreResourcesSpec()).thenReturn(List.of()); when(mockAppContext.getDriverResourcesSpec()).thenReturn(List.of()); when(mockRecorder.persistStatus(eq(mockAppContext), any())).thenReturn(true); when(mockRecorder.appendNewStateAndPersist(eq(mockAppContext), any())).thenReturn(true); try (MockedStatic<ReconcilerUtils> utils = Mockito.mockStatic(ReconcilerUtils.class)) { ReconcileProgress progress = cleanUpWithReason.reconcile(mockAppContext, mockRecorder); utils.verify(() -> ReconcilerUtils.deleteResourceIfExists(mockClient, driverPod, false)); Assertions.assertEquals( ReconcileProgress.completeAndRequeueAfter(Duration.ofMillis(2000)), progress); } verify(mockAppContext, times(1)).getResource(); verify(mockApp, times(2)).getSpec(); verify(mockApp, times(2)).getStatus(); verify(mockAppContext).getClient(); verify(mockAppContext).getDriverPod(); ArgumentCaptor<ApplicationState> captor = ArgumentCaptor.forClass(ApplicationState.class); verify(mockRecorder).appendNewStateAndPersist(eq(mockAppContext), captor.capture()); ApplicationState appState = captor.getValue(); Assertions.assertEquals( ApplicationStateSummary.ResourceReleased, appState.getCurrentStateSummary()); Assertions.assertEquals(Constants.APP_CANCELLED_MESSAGE, appState.getMessage()); verifyNoMoreInteractions(mockAppContext, mockRecorder, mockApp, mockClient, driverPod); } } } @Test 
void routineCleanupForTerminatedAppExpectNoAction() { SparkAppStatusRecorder mockRecorder = mock(SparkAppStatusRecorder.class); AppCleanUpStep routineCheck = new AppCleanUpStep(); for (ApplicationStateSummary stateSummary : ApplicationStateSummary.values()) { if (stateSummary.isTerminated()) { ApplicationStatus status = prepareApplicationStatus(stateSummary); SparkApplication mockApp = mock(SparkApplication.class); ApplicationSpec spec = ApplicationSpec.builder().build(); when(mockApp.getStatus()).thenReturn(status); SparkAppContext mockAppContext = mock(SparkAppContext.class); when(mockAppContext.getResource()).thenReturn(mockApp); when(mockApp.getSpec()).thenReturn(spec); ReconcileProgress progress = routineCheck.reconcile(mockAppContext, mockRecorder); Assertions.assertEquals(ReconcileProgress.completeAndNoRequeue(), progress); verify(mockAppContext, times(1)).getResource(); verify(mockApp, times(2)).getSpec(); verify(mockApp, times(2)).getStatus(); verify(mockAppContext).getClient(); verify(mockRecorder).removeCachedStatus(mockApp); verifyNoMoreInteractions(mockAppContext, mockRecorder, mockApp); } } } @Test void onDemandCleanupForTerminatedAppExpectNoAction() { SparkAppStatusRecorder mockRecorder = mock(SparkAppStatusRecorder.class); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus status = prepareApplicationStatus(ApplicationStateSummary.ResourceReleased); SparkApplication mockApp = mock(SparkApplication.class); ApplicationSpec spec = ApplicationSpec.builder().build(); when(mockApp.getStatus()).thenReturn(status); SparkAppContext mockAppContext = mock(SparkAppContext.class); when(mockAppContext.getResource()).thenReturn(mockApp); when(mockApp.getSpec()).thenReturn(spec); ReconcileProgress progress = cleanUpWithReason.reconcile(mockAppContext, mockRecorder); Assertions.assertEquals(ReconcileProgress.completeAndNoRequeue(), progress); verify(mockAppContext, times(1)).getResource(); verify(mockApp, 
times(2)).getSpec(); verify(mockApp, times(2)).getStatus(); verify(mockRecorder).removeCachedStatus(mockApp); verify(mockAppContext).getClient(); verifyNoMoreInteractions(mockAppContext, mockRecorder, mockApp); } @Test void onDemandCleanupForTerminatedAppExpectDelete() { SparkAppStatusRecorder mockRecorder = mock(SparkAppStatusRecorder.class); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus status = prepareApplicationStatus(ApplicationStateSummary.TerminatedWithoutReleaseResources); SparkApplication mockApp = mock(SparkApplication.class); ApplicationSpec spec = ApplicationSpec.builder().build(); when(mockApp.getStatus()).thenReturn(status); SparkAppContext mockAppContext = mock(SparkAppContext.class); when(mockAppContext.getResource()).thenReturn(mockApp); when(mockApp.getSpec()).thenReturn(spec); KubernetesClient mockClient = mock(KubernetesClient.class); when(mockAppContext.getClient()).thenReturn(mockClient); Pod driverPod = mock(Pod.class); when(mockAppContext.getDriverPod()).thenReturn(Optional.of(driverPod)); when(mockAppContext.getDriverPreResourcesSpec()).thenReturn(List.of()); when(mockAppContext.getDriverResourcesSpec()).thenReturn(List.of()); when(mockRecorder.persistStatus(eq(mockAppContext), any())).thenReturn(true); when(mockRecorder.appendNewStateAndPersist(eq(mockAppContext), any())).thenReturn(true); try (MockedStatic<ReconcilerUtils> utils = Mockito.mockStatic(ReconcilerUtils.class)) { ReconcileProgress progress = cleanUpWithReason.reconcile(mockAppContext, mockRecorder); utils.verify(() -> ReconcilerUtils.deleteResourceIfExists(mockClient, driverPod, false)); Assertions.assertEquals( ReconcileProgress.completeAndRequeueAfter(Duration.ofMillis(2000)), progress); } verify(mockAppContext, times(1)).getResource(); verify(mockApp, times(3)).getSpec(); verify(mockApp, times(3)).getStatus(); verify(mockAppContext, times(2)).getClient(); verify(mockAppContext).getDriverPod(); 
ArgumentCaptor<ApplicationState> captor = ArgumentCaptor.forClass(ApplicationState.class); verify(mockRecorder).appendNewStateAndPersist(eq(mockAppContext), captor.capture()); ApplicationState appState = captor.getValue(); Assertions.assertEquals( ApplicationStateSummary.ResourceReleased, appState.getCurrentStateSummary()); Assertions.assertEquals(Constants.APP_CANCELLED_MESSAGE, appState.getMessage()); verifyNoMoreInteractions(mockAppContext, mockRecorder, mockApp, mockClient, driverPod); } @Test void cleanupForAppExpectDeleteWithRecompute() { SparkAppStatusRecorder mockRecorder = mock(SparkAppStatusRecorder.class); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus status1 = prepareApplicationStatus(ApplicationStateSummary.SchedulingFailure); ApplicationStatus status2 = prepareApplicationStatus( ApplicationStateSummary.SchedulingFailure, ApplicationStateSummary.TerminatedWithoutReleaseResources); SparkApplication mockApp1 = mock(SparkApplication.class); SparkApplication mockApp2 = mock(SparkApplication.class); ApplicationSpec spec = ApplicationSpec.builder().build(); when(mockApp1.getStatus()).thenReturn(status1); when(mockApp2.getStatus()).thenReturn(status2); SparkAppContext mockAppContext1 = mock(SparkAppContext.class); SparkAppContext mockAppContext2 = mock(SparkAppContext.class); when(mockAppContext1.getResource()).thenReturn(mockApp1); when(mockAppContext2.getResource()).thenReturn(mockApp2); when(mockApp1.getSpec()).thenReturn(spec); when(mockApp2.getSpec()).thenReturn(spec); KubernetesClient mockClient = mock(KubernetesClient.class); when(mockAppContext1.getClient()).thenReturn(mockClient); Pod driverPod = mock(Pod.class); Pod driverPodSpec = mock(Pod.class); ConfigMap resource1 = mock(ConfigMap.class); ConfigMap resource2 = mock(ConfigMap.class); when(mockAppContext1.getDriverPod()).thenReturn(Optional.of(driverPod)); when(mockAppContext1.getDriverPodSpec()).thenReturn(driverPodSpec); 
when(mockAppContext1.getDriverPreResourcesSpec()).thenReturn(List.of(resource1)); when(mockAppContext1.getDriverResourcesSpec()).thenReturn(List.of(resource2)); when(mockAppContext2.getDriverPod()).thenReturn(Optional.of(driverPod)); when(mockAppContext2.getDriverPodSpec()).thenReturn(driverPodSpec); when(mockAppContext2.getDriverPreResourcesSpec()).thenReturn(List.of(resource1)); when(mockAppContext2.getDriverResourcesSpec()).thenReturn(List.of(resource2)); when(mockRecorder.persistStatus(any(), any())).thenReturn(true); when(mockRecorder.appendNewStateAndPersist(any(), any())).thenReturn(true); try (MockedStatic<ReconcilerUtils> utils = Mockito.mockStatic(ReconcilerUtils.class)) { ReconcileProgress progress1 = cleanUpWithReason.reconcile(mockAppContext1, mockRecorder); ReconcileProgress progress2 = cleanUpWithReason.reconcile(mockAppContext2, mockRecorder); utils.verify(() -> ReconcilerUtils.deleteResourceIfExists(mockClient, resource1, false)); utils.verify(() -> ReconcilerUtils.deleteResourceIfExists(mockClient, driverPodSpec, false)); utils.verify(() -> ReconcilerUtils.deleteResourceIfExists(mockClient, resource2, false)); Assertions.assertEquals( ReconcileProgress.completeAndRequeueAfter(Duration.ofMillis(2000)), progress1); Assertions.assertEquals( ReconcileProgress.completeAndRequeueAfter(Duration.ofMillis(2000)), progress2); } verify(mockAppContext1, times(1)).getResource(); verify(mockApp1, times(2)).getSpec(); verify(mockApp1, times(2)).getStatus(); verify(mockAppContext1, times(3)).getClient(); verify(mockAppContext1).getDriverPreResourcesSpec(); verify(mockAppContext1).getDriverPodSpec(); verify(mockAppContext1).getDriverResourcesSpec(); verify(mockAppContext2, times(1)).getResource(); verify(mockApp2, times(2)).getSpec(); verify(mockApp2, times(2)).getStatus(); verify(mockAppContext2, times(3)).getClient(); verify(mockAppContext2).getDriverPreResourcesSpec(); verify(mockAppContext2).getDriverPodSpec(); verify(mockAppContext2).getDriverResourcesSpec(); 
ArgumentCaptor<ApplicationState> captor = ArgumentCaptor.forClass(ApplicationState.class); verify(mockRecorder).appendNewStateAndPersist(eq(mockAppContext1), captor.capture()); verify(mockRecorder).appendNewStateAndPersist(eq(mockAppContext2), captor.capture()); Assertions.assertEquals(2, captor.getAllValues().size()); ApplicationState appState1 = captor.getAllValues().get(0); Assertions.assertEquals( ApplicationStateSummary.ResourceReleased, appState1.getCurrentStateSummary()); Assertions.assertEquals(Constants.APP_CANCELLED_MESSAGE, appState1.getMessage()); ApplicationState appState2 = captor.getAllValues().get(1); Assertions.assertEquals( ApplicationStateSummary.ResourceReleased, appState2.getCurrentStateSummary()); Assertions.assertEquals(Constants.APP_CANCELLED_MESSAGE, appState2.getMessage()); verifyNoMoreInteractions( mockAppContext1, mockAppContext2, mockRecorder, mockApp1, mockApp2, mockClient, driverPod); } @Test void checkEarlyExitForResourceReleasedAppWithoutTTL() { AppCleanUpStep routineCheck = new AppCleanUpStep(); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus succeeded = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.Succeeded); ApplicationStatus failed = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.SchedulingFailure); ApplicationStatus cancelled = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.RunningHealthy); List<ApplicationStatus> statusList = List.of(succeeded, failed, cancelled); KubernetesClient mockClient = mock(KubernetesClient.class); List<ApplicationSpec> specList = List.of(alwaysRetain, neverRetain, exceedRetainDuration, notExceedRetainDuration); for (ApplicationSpec appSpec : specList) { for (ApplicationStatus appStatus : statusList) { SparkAppStatusRecorder mockRecorder1 = mock(SparkAppStatusRecorder.class); SparkAppStatusRecorder 
mockRecorder2 = mock(SparkAppStatusRecorder.class); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(appStatus); when(mockApp.getSpec()).thenReturn(appSpec); Optional<ReconcileProgress> routineCheckProgress = routineCheck.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder1); assertTrue(routineCheckProgress.isPresent()); ReconcileProgress reconcileProgress = routineCheckProgress.get(); Optional<ReconcileProgress> onDemandProgress = cleanUpWithReason.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder2); Assertions.assertEquals(ReconcileProgress.completeAndNoRequeue(), reconcileProgress); verify(mockRecorder1).removeCachedStatus(mockApp); assertTrue(onDemandProgress.isPresent()); Assertions.assertEquals( ReconcileProgress.completeAndNoRequeue(), routineCheckProgress.get()); verify(mockRecorder2).removeCachedStatus(mockApp); } } } @Test void checkEarlyExitForResourceReleasedAppWithExceededTTL() { AppCleanUpStep routineCheck = new AppCleanUpStep(); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus succeeded = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.Succeeded); ApplicationStatus failed = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.SchedulingFailure); ApplicationStatus cancelled = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.Failed); List<ApplicationStatus> statusList = List.of(succeeded, failed, cancelled); KubernetesClient mockClient = mock(KubernetesClient.class); for (ApplicationStatus appStatus : statusList) { SparkAppStatusRecorder mockRecorder1 = mock(SparkAppStatusRecorder.class); SparkAppStatusRecorder mockRecorder2 = mock(SparkAppStatusRecorder.class); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(appStatus); 
when(mockApp.getSpec()).thenReturn(exceedRetainDurationFromTtl); try (MockedStatic<ReconcilerUtils> utils = Mockito.mockStatic(ReconcilerUtils.class)) { Optional<ReconcileProgress> routineCheckProgress = routineCheck.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder1); assertTrue(routineCheckProgress.isPresent()); ReconcileProgress reconcileProgress = routineCheckProgress.get(); assertTrue(reconcileProgress.isCompleted()); assertFalse(reconcileProgress.isRequeue()); utils.verify(() -> ReconcilerUtils.deleteResourceIfExists(mockClient, mockApp, true)); verify(mockRecorder1).removeCachedStatus(mockApp); Optional<ReconcileProgress> onDemandProgress = cleanUpWithReason.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder2); verify(mockRecorder1).removeCachedStatus(mockApp); assertTrue(onDemandProgress.isPresent()); ReconcileProgress reconcileProgressOnDemand = onDemandProgress.get(); Assertions.assertEquals( ReconcileProgress.completeAndNoRequeue(), reconcileProgressOnDemand); verify(mockRecorder2).removeCachedStatus(mockApp); } } } @Test void checkEarlyExitForResourceReleasedAppWithinTTL() { AppCleanUpStep routineCheck = new AppCleanUpStep(); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus succeeded = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.Succeeded); ApplicationStatus failed = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.SchedulingFailure); ApplicationStatus cancelled = prepareApplicationStatus( ApplicationStateSummary.ResourceReleased, ApplicationStateSummary.Failed); List<ApplicationStatus> statusList = List.of(succeeded, failed, cancelled); KubernetesClient mockClient = mock(KubernetesClient.class); for (ApplicationStatus appStatus : statusList) { SparkAppStatusRecorder mockRecorder1 = mock(SparkAppStatusRecorder.class); SparkAppStatusRecorder mockRecorder2 = 
mock(SparkAppStatusRecorder.class); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(appStatus); when(mockApp.getSpec()).thenReturn(notExceedTtl); try (MockedStatic<ReconcilerUtils> utils = Mockito.mockStatic(ReconcilerUtils.class)) { Optional<ReconcileProgress> routineCheckProgress = routineCheck.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder1); assertTrue(routineCheckProgress.isPresent()); ReconcileProgress reconcileProgress = routineCheckProgress.get(); assertTrue(reconcileProgress.isCompleted()); assertTrue(reconcileProgress.isRequeue()); assertTrue(reconcileProgress.getRequeueAfterDuration().toMillis() > 0); utils.verifyNoInteractions(); verifyNoMoreInteractions(mockRecorder1); Optional<ReconcileProgress> onDemandProgress = cleanUpWithReason.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder2); assertTrue(onDemandProgress.isPresent()); ReconcileProgress reconcileProgressOnDemand = onDemandProgress.get(); Assertions.assertEquals( ReconcileProgress.completeAndNoRequeue(), reconcileProgressOnDemand); verify(mockRecorder2).removeCachedStatus(mockApp); } } } @Test void checkEarlyExitForAppTerminatedWithoutReleaseResourcesInfiniteRetain() { AppCleanUpStep routineCheck = new AppCleanUpStep(); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus succeeded = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.Succeeded); ApplicationStatus failed = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.SchedulingFailure); ApplicationStatus cancelled = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.RunningHealthy); List<ApplicationStatus> statusList = List.of(succeeded, failed, cancelled); KubernetesClient mockClient = mock(KubernetesClient.class); for (ApplicationStatus 
appStatus : statusList) { SparkAppStatusRecorder mockRecorder1 = mock(SparkAppStatusRecorder.class); SparkAppStatusRecorder mockRecorder2 = mock(SparkAppStatusRecorder.class); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(appStatus); when(mockApp.getSpec()).thenReturn(alwaysRetain); Optional<ReconcileProgress> routineCheckProgress = routineCheck.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder1); assertTrue(routineCheckProgress.isPresent()); Assertions.assertEquals(ReconcileProgress.completeAndNoRequeue(), routineCheckProgress.get()); verify(mockRecorder1).removeCachedStatus(mockApp); Optional<ReconcileProgress> onDemandProgress = cleanUpWithReason.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder2); assertFalse(onDemandProgress.isPresent()); verifyNoMoreInteractions(mockRecorder2); } } @Test void checkEarlyExitForAppTerminatedWithoutReleaseResourcesExceededRetainDuration() { AppCleanUpStep routineCheck = new AppCleanUpStep(); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus succeeded = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.Succeeded); ApplicationStatus failed = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.SchedulingFailure); ApplicationStatus cancelled = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.RunningHealthy); List<ApplicationStatus> statusList = List.of(succeeded, failed, cancelled); KubernetesClient mockClient = mock(KubernetesClient.class); List<ApplicationSpec> specs = List.of(exceedRetainDuration, exceedRetainDurationFromTtl); for (ApplicationSpec spec : specs) { for (ApplicationStatus appStatus : statusList) { SparkAppStatusRecorder mockRecorder1 = mock(SparkAppStatusRecorder.class); SparkAppStatusRecorder mockRecorder2 = 
mock(SparkAppStatusRecorder.class); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(appStatus); when(mockApp.getSpec()).thenReturn(spec); Optional<ReconcileProgress> routineCheckProgress = routineCheck.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder1); assertFalse(routineCheckProgress.isPresent()); verifyNoMoreInteractions(mockRecorder1, mockClient); Optional<ReconcileProgress> onDemandProgress = cleanUpWithReason.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder2); assertFalse(onDemandProgress.isPresent()); verifyNoMoreInteractions(mockRecorder2, mockClient); } } } @Test void checkEarlyExitForAppTerminatedWithoutReleaseResourcesWithinRetainDuration() { AppCleanUpStep routineCheck = new AppCleanUpStep(); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); ApplicationStatus succeeded = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.Succeeded); ApplicationStatus failed = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.SchedulingFailure); ApplicationStatus cancelled = prepareApplicationStatus( ApplicationStateSummary.TerminatedWithoutReleaseResources, ApplicationStateSummary.RunningHealthy); List<ApplicationStatus> statusList = List.of(succeeded, failed, cancelled); KubernetesClient mockClient = mock(KubernetesClient.class); for (ApplicationStatus appStatus : statusList) { SparkAppStatusRecorder mockRecorder1 = mock(SparkAppStatusRecorder.class); SparkAppStatusRecorder mockRecorder2 = mock(SparkAppStatusRecorder.class); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(appStatus); when(mockApp.getSpec()).thenReturn(notExceedRetainDuration); Optional<ReconcileProgress> routineCheckProgress = routineCheck.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder1); 
assertTrue(routineCheckProgress.isPresent()); ReconcileProgress reconcileProgress = routineCheckProgress.get(); assertTrue(reconcileProgress.isCompleted()); assertTrue(reconcileProgress.isRequeue()); verifyNoMoreInteractions(mockRecorder2, mockClient); Optional<ReconcileProgress> onDemandProgress = cleanUpWithReason.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder2); assertFalse(onDemandProgress.isPresent()); verifyNoMoreInteractions(mockRecorder2, mockClient); } } @Test void checkEarlyExitForNotTerminatedApp() { AppCleanUpStep routineCheck = new AppCleanUpStep(); AppCleanUpStep cleanUpWithReason = new AppCleanUpStep(SparkAppStatusUtils::appCancelled); for (ApplicationStateSummary stateSummary : ApplicationStateSummary.values()) { if (stateSummary.isTerminated()) { continue; } ApplicationStatus status = prepareApplicationStatus(stateSummary); KubernetesClient mockClient = mock(KubernetesClient.class); for (ApplicationSpec appSpec : specs) { SparkAppStatusRecorder mockRecorder1 = mock(SparkAppStatusRecorder.class); SparkAppStatusRecorder mockRecorder2 = mock(SparkAppStatusRecorder.class); SparkApplication mockApp = mock(SparkApplication.class); when(mockApp.getStatus()).thenReturn(status); when(mockApp.getSpec()).thenReturn(appSpec); Optional<ReconcileProgress> routineCheckProgress = routineCheck.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder1); assertTrue(routineCheckProgress.isEmpty()); verifyNoMoreInteractions(mockRecorder1, mockClient); Optional<ReconcileProgress> onDemandProgress = cleanUpWithReason.checkEarlyExitForTerminatedApp(mockClient, mockApp, mockRecorder2); assertTrue(onDemandProgress.isEmpty()); verifyNoMoreInteractions(mockRecorder2, mockClient); } } } private ApplicationStatus prepareApplicationStatus(ApplicationStateSummary currentStateSummary) { ApplicationStatus status = new ApplicationStatus(); ApplicationState state = new ApplicationState(currentStateSummary, "foo"); // to make sure the state exceeds threshold 
state.setLastTransitionTime(Instant.now().minusSeconds(10).toString()); return status.appendNewState(state); } private ApplicationStatus prepareApplicationStatus( ApplicationStateSummary currentStateSummary, ApplicationStateSummary previousStateSummary) { ApplicationStatus status = prepareApplicationStatus(previousStateSummary); ApplicationState state = new ApplicationState(currentStateSummary, "foo"); state.setLastTransitionTime(Instant.now().minusSeconds(5).toString()); return status.appendNewState(state); } @Test void isReleasingResourcesForSchedulingFailureAttempt() { AppCleanUpStep appCleanUpStep = new AppCleanUpStep(); ApplicationStatus status = new ApplicationStatus(); assertFalse(appCleanUpStep.isReleasingResourcesForSchedulingFailureAttempt(status)); status = status.appendNewState(new ApplicationState(ApplicationStateSummary.DriverRequested, "foo")); assertFalse(appCleanUpStep.isReleasingResourcesForSchedulingFailureAttempt(status)); status = status.appendNewState(new ApplicationState(ApplicationStateSummary.RunningHealthy, "foo")); assertFalse(appCleanUpStep.isReleasingResourcesForSchedulingFailureAttempt(status)); status = status.appendNewState(new ApplicationState(ApplicationStateSummary.Failed, "foo")); assertFalse(appCleanUpStep.isReleasingResourcesForSchedulingFailureAttempt(status)); status = status.appendNewState( new ApplicationState(ApplicationStateSummary.ScheduledToRestart, "foo")); assertFalse(appCleanUpStep.isReleasingResourcesForSchedulingFailureAttempt(status)); status = status.appendNewState( new ApplicationState(ApplicationStateSummary.SchedulingFailure, "foo")); assertTrue(appCleanUpStep.isReleasingResourcesForSchedulingFailureAttempt(status)); status = status.appendNewState( new ApplicationState(ApplicationStateSummary.TerminatedWithoutReleaseResources, "foo")); assertTrue(appCleanUpStep.isReleasingResourcesForSchedulingFailureAttempt(status)); } }
apache/jclouds
34,973
apis/ec2/src/main/java/org/jclouds/ec2/domain/InstanceType.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.ec2.domain; public class InstanceType { /** * Micro Instance * <ul> * <li>613 MB of memory</li> * <li>up to 2 ECUs (for short periodic bursts)</li> * <li>No instance storage (EBS storage only)</li> * <li>32-bit or 64-bit platform</li> * </ul> */ public static final String T1_MICRO = "t1.micro"; /** * Micro Burstable Performance Instance * <ul> * <li>512 MB memory</li> * <li>1 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T2_NANO = "t2.nano"; /** * Micro Burstable Performance Instance * <ul> * <li>1 GB memory</li> * <li>1 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T2_MICRO = "t2.micro"; /** * Micro Burstable Performance Instance * <ul> * <li>2 GB memory</li> * <li>1 vCPU / 20% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T2_SMALL = "t2.small"; /** * Micro Burstable Performance Instance * <ul> * <li>4 GB memory</li> * <li>2 vCPU / 40% baseline performance</li> * <li>No 
instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T2_MEDIUM = "t2.medium"; /** * Micro Burstable Performance Instance * <ul> * <li>8 GB memory</li> * <li>2 vCPU / 40% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T2_LARGE = "t2.large"; /** * Micro Burstable Performance Instance * <ul> * <li>16 GB memory</li> * <li>4 vCPU / 40% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T2_XLARGE = "t2.xlarge"; /** * Micro Burstable Performance Instance * <ul> * <li>32 GB memory</li> * <li>8 vCPU / 40% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T2_2XLARGE = "t2.2xlarge"; /** * Micro Burstable Performance Instance * <ul> * <li>512 MB memory</li> * <li>2 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T3_NANO = "t3.nano"; /** * Micro Burstable Performance Instance * <ul> * <li>1 GB memory</li> * <li>2 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T3_MICRO = "t3.micro"; /** * Micro Burstable Performance Instance * <ul> * <li>2 GB memory</li> * <li>2 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T3_SMALL = "t3.small"; /** * Micro Burstable Performance Instance * <ul> * <li>4 GB memory</li> * <li>2 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T3_MEDIUM = "t3.medium"; /** * Micro Burstable Performance Instance * <ul> * <li>8 GB 
memory</li> * <li>2 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T3_LARGE = "t3.large"; /** * Micro Burstable Performance Instance * <ul> * <li>16 GB memory</li> * <li>4 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T3_XLARGE = "t3.xlarge"; /** * Micro Burstable Performance Instance * <ul> * <li>32 GB memory</li> * <li>8 vCPU / 10% baseline performance</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * </ul> */ public static final String T3_2XLARGE = "t3.2xlarge"; /** * Small Instance * <ul> * <li>1.7 GB memory</li> * <li>1 EC2 Compute Unit (1 virtual core with 1 EC2 Compute Unit)</li> * <li>160 GB instance storage (150 GB plus 10 GB root partition)</li> * <li>32-bit or 64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M1_SMALL = "m1.small"; /** * Medium Instance * <ul> * <li>3.75 GB memory</li> * <li>2 EC2 Compute Unit (1 virtual core with 2 EC2 Compute Unit)</li> * <li>410 GB instance storage</li> * <li>32-bit or 64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M1_MEDIUM = "m1.medium"; /** * Large Instance * <ul> * <li>7.5 GB memory</li> * <li>4 EC2 Compute Units (2 virtual cores with 2 EC2 Compute Units each)</li> * <li>850 GB instance storage (2x420 GB plus 10 GB root partition)</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M1_LARGE = "m1.large"; /** * Extra Large Instance * <ul> * <li>15 GB memory</li> * <li>8 EC2 Compute Units (4 virtual cores with 2 EC2 Compute Units each)</li> * <li>1690 GB instance storage (4x420 GB plus 10 GB root partition)</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M1_XLARGE = "m1.xlarge"; 
/** * High-Memory Extra Large Instance * <ul> * <li>17.1 GB of memory</li> * <li>6.5 EC2 Compute Units (2 virtual cores with 3.25 EC2 Compute Units * each)</li> * <li>420 GB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M2_XLARGE = "m2.xlarge"; /** * High-Memory Double Extra Large Instance * <ul> * <li>34.2 GB of memory</li> * <li>13 EC2 Compute Units (4 virtual cores with 3.25 EC2 Compute Units * each)</li> * <li>850 GB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M2_2XLARGE = "m2.2xlarge"; /** * High-Memory Quadruple Extra Large Instance * <ul> * <li>68.4 GB of memory</li> * <li>26 EC2 Compute Units (8 virtual cores with 3.25 EC2 Compute Units * each)</li> * <li>1690 GB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M2_4XLARGE = "m2.4xlarge"; /** * M3 Medium Instance * <ul> * <li>3.75 GiB memory</li> * <li>3 EC2 Compute Units (1 virtual core with 3 EC2 Compute Units)</li> * <li>1 SSD-based volume with 4 GiB of instance storage</li> * <li>32-bit or 64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M3_MEDIUM = "m3.medium"; /** * M3 Large Instance * <ul> * <li>7 GiB memory</li> * <li>6.5 EC2 Compute Units (2 virtual cores with 3.25 EC2 Compute Units each)</li> * <li>1 SSD-based volume with 32 GiB of instance storage</li> * <li>32-bit or 64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M3_LARGE = "m3.large"; /** * M3 Extra Large Instance * <ul> * <li>15 GiB memory</li> * <li>13 EC2 Compute Units (4 virtual cores with 3.25 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M3_XLARGE = "m3.xlarge"; /** * M3 Double Extra Large Instance * <ul> 
* <li>30 GiB memory</li> * <li>26 EC2 Compute Units (8 virtual cores with 3.25 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M3_2XLARGE = "m3.2xlarge"; /** * M4 Large Instance * <ul> * <li>8 GiB memory</li> * <li>6.5 EC2 Compute Units (2 virtual cores with 3.25 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M4_LARGE = "m4.large"; /** * M4 Extra Large Instance * <ul> * <li>16 GiB memory</li> * <li>13 EC2 Compute Units (4 virtual cores with 3.25 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M4_XLARGE = "m4.xlarge"; /** * M4 Double Extra Large Instance * <ul> * <li>32 GiB memory</li> * <li>26 EC2 Compute Units (8 virtual cores with 3.25 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M4_2XLARGE = "m4.2xlarge"; /** * M4 Quadruple Extra Large Instance * <ul> * <li>64 GiB memory</li> * <li>53.5 EC2 Compute Units (16 virtual cores with 3.34375 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M4_4XLARGE = "m4.4xlarge"; /** * M4 Decuple Extra Large Instance * <ul> * <li>160 GiB memory</li> * <li>124.5 EC2 Compute Units (40 virtual cores with 3.1125 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M4_10XLARGE = "m4.10xlarge"; /** * M4 Sexdecuple Extra Large Instance * <ul> * <li>256 GiB memory</li> * <li>199.2 EC2 Compute Units (64 virtual cores with 3.1125 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit 
platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M4_16XLARGE = "m4.16xlarge"; /** * M5 Large Instance * <ul> * <li>8 GiB memory</li> * <li>5 EC2 Compute Units (2 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M5_LARGE = "m5.large"; /** * M5 Extra Large Instance * <ul> * <li>16 GiB memory</li> * <li>10 EC2 Compute Units (4 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5_XLARGE = "m5.xlarge"; /** * M5 Double Extra Large Instance * <ul> * <li>32 GiB memory</li> * <li>26 EC2 Compute Units (8 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5_2XLARGE = "m5.2xlarge"; /** * M5 Quadruple Extra Large Instance * <ul> * <li>64 GiB memory</li> * <li>40 EC2 Compute Units (16 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5_4XLARGE = "m5.4xlarge"; /** * M5 12x Extra Large Instance * <ul> * <li>192 GiB memory</li> * <li>144 EC2 Compute Units (48 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5_12XLARGE = "m5.12xlarge"; /** * M5 24x Extra Large Instance * <ul> * <li>384 GiB memory</li> * <li>288 EC2 Compute Units (96 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>EBS storage only</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5_24XLARGE = "m5.24xlarge"; /** * M5D Large Instance * <ul> * <li>8 GiB memory</li> * <li>5 EC2 Compute 
Units (2 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>1 x 75 NVMe SSD</li> * <li>EBS storage</li> * <li>64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String M5D_LARGE = "m5d.large"; /** * M5D Extra Large Instance * <ul> * <li>16 GiB memory</li> * <li>10 EC2 Compute Units (4 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>1 x 150 NVMe SSD</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5D_XLARGE = "m5d.xlarge"; /** * M5D Double Extra Large Instance * <ul> * <li>32 GiB memory</li> * <li>26 EC2 Compute Units (8 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>1 x 300 NVMe SSD</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5D_2XLARGE = "m5d.2xlarge"; /** * M5D Quadruple Extra Large Instance * <ul> * <li>64 GiB memory</li> * <li>40 EC2 Compute Units (16 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>2 x 300 NVMe SSD</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5D_4XLARGE = "m5d.4xlarge"; /** * M5D 12x Extra Large Instance * <ul> * <li>192 GiB memory</li> * <li>144 EC2 Compute Units (48 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>2 x 900 NVMe SSD</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5D_12XLARGE = "m5d.12xlarge"; /** * M5D 24x Extra Large Instance * <ul> * <li>384 GiB memory</li> * <li>288 EC2 Compute Units (96 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>4 x 900 NVMe SSD</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String M5D_24XLARGE = "m5d.24xlarge"; /** * High-CPU Medium Instance * <ul> * <li>1.7 GB of memory</li> * <li>5 EC2 Compute Units (2 virtual cores with 2.5 EC2 Compute Units each)</li> * <li>350 GB of instance storage</li> * <li>32-bit platform</li> * <li>I/O 
Performance: Moderate</li> * </ul> */ public static final String C1_MEDIUM = "c1.medium"; /** * High-CPU Extra Large Instance * <ul> * <li>7 GB of memory</li> * <li>20 EC2 Compute Units (8 virtual cores with 2.5 EC2 Compute Units each) * </li> * <li>1690 GB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String C1_XLARGE = "c1.xlarge"; /** * Cluster Compute Instance * <ul> * <li>22 GB of memory</li> * <li>33.5 EC2 Compute Units (2 x Intel Xeon X5570, quad-core "Nehalem" * architecture)</li> * <li>1690 GB of 64-bit storage (2 x 840 GB, plus 10 GB root partition)</li> * <li>10 Gbps Ethernet</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String CG1_4XLARGE = "cg1.4xlarge"; /** * Cluster Compute Instance * <ul> * <li>23 GB of memory</li> * <li>33.5 EC2 Compute Units (2 x Intel Xeon X5570, quad-core "Nehalem" * architecture)</li> * <li>1690 GB of 64-bit storage (2 x 840 GB, plus 10 GB root partition)</li> * <li>10 Gbps Ethernet</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String CC1_4XLARGE = "cc1.4xlarge"; /** * Cluster Compute Eight Extra Large specifications * <ul> * <li>60.5 GB of memory</li> * <li>88 EC2 Compute Units (Eight-core 2 x Intel Xeon)</li> * <li>3370 GB of 64-bit storage (4 x 840 GB, plus 10 GB root partition)</li> * <li>10 Gbps Ethernet</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String CC2_8XLARGE = "cc2.8xlarge"; /** * High I/O Quadruple Extra Large specifications * <ul> * <li>60.5 GB of memory</li> * <li>35 EC2 Compute Units (16 virtual cores)</li> * <li>2 SSD-based volumes each with 1024 GB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: Very High (10 Gigabit Ethernet)</li> * <li>Storage I/O Performance: Very High**</li> * </ul> */ public static final String HI1_4XLARGE = "hi1.4xlarge"; /** * High Storage 
Eight Extra Large * <ul> * <li>117 GiB of memory</li> * <li>35 EC2 Compute Units (16 virtual cores*)</li> * <li>24 hard disk drives each with 2 TB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: Very High (10 Gigabit Ethernet)</li> * <li>Storage I/O Performance: Very High**</li> * </ul> */ public static final String HS1_8XLARGE = "hs1.8xlarge"; /** * GPU Instance Double Extra Large * <ul> * <li>15 GiB of memory</li> * <li>26 EC2 Compute Units (8 virtual cores*), 1xNVIDIA GRID GPU (Kepler GK104)</li> * <li>60 GB instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String G2_2XLARGE = "g2.2xlarge"; /** * C3 High-CPU Large * <ul> * <li>3.75 GiB of memory</li> * <li>7 EC2 Compute Units (2 virtual cores)</li> * <li>2 SSD-based volumes each with 16 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: Moderate</li> * </ul> */ public static final String C3_LARGE = "c3.large"; /** * C3 High-CPU Extra Large * <ul> * <li>7 GiB of memory</li> * <li>14 EC2 Compute Units (4 virtual cores)</li> * <li>2 SSD-based volumes each with 40 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String C3_XLARGE = "c3.xlarge"; /** * C3 High-CPU Double Extra Large * <ul> * <li>15 GiB of memory</li> * <li>28 EC2 Compute Units (8 virtual cores)</li> * <li>2 SSD-based volumes each with 80 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String C3_2XLARGE = "c3.2xlarge"; /** * C3 High-CPU Quadruple Extra Large * <ul> * <li>30 GiB of memory</li> * <li>55 EC2 Compute Units (16 virtual cores)</li> * <li>2 SSD-based volumes each with 160 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String C3_4XLARGE = "c3.4xlarge"; /** * C3 High-CPU Octuple Extra Large * <ul> * <li>60 GiB of 
memory</li> * <li>108 EC2 Compute Units (32 virtual cores)</li> * <li>2 SSD-based volumes each with 320 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String C3_8XLARGE = "c3.8xlarge"; /** * C4 Compute-optimized Extra Large * <ul> * <li>7.5 GiB of memory</li> * <li>4 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>750 Mbps Dedicated EBS Throughput</li> * </ul> */ public static final String C4_XLARGE = "c4.xlarge"; /** * C4 Compute-optimized Double Extra Large * <ul> * <li>15 GiB of memory</li> * <li>8 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>1000 Mbps Dedicated EBS Throughput</li> * </ul> */ public static final String C4_2XLARGE = "c4.2xlarge"; /** * C4 Compute-optimized Quadruple Extra Large * <ul> * <li>30 GiB of memory</li> * <li>16 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>2000 Mbps Dedicated EBS Throughput</li> * </ul> */ public static final String C4_4XLARGE = "c4.4xlarge"; /** * C4 Compute-optimized Octuple Extra Large * <ul> * <li>60 GiB of memory</li> * <li>36 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>4000 Mbps Dedicated EBS Throughput</li> * </ul> */ public static final String C4_8XLARGE = "c4.8xlarge"; /** * C4 Compute-optimized Large * <ul> * <li>3.75 GiB of memory</li> * <li>2 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>500 Mbps Dedicated EBS Throughput</li> * </ul> */ public static final String C4_LARGE = "c4.large"; /** * C5 Compute-optimized Large * <ul> * <li>4 GiB of memory</li> * <li>2 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5_LARGE = "c5.large"; /** * C5 Compute-optimized X Large * <ul> * <li>8 GiB of memory</li> * <li>4 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>Up to 25 Gbps 
of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5_XLARGE = "c5.xlarge"; /** * C5 Compute-optimized 2x Large * <ul> * <li>16 GiB of memory</li> * <li>8 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5_2XLARGE = "c5.2xlarge"; /** * C5 Compute-optimized 4x Large * <ul> * <li>32 GiB of memory</li> * <li>16 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5_4XLARGE = "c5.4xlarge"; /** * C5 Compute-optimized 9x Large * <ul> * <li>72 GiB of memory</li> * <li>36 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5_9XLARGE = "c5.9xlarge"; /** * C5 Compute-optimized 18x Large * <ul> * <li>144 GiB of memory</li> * <li>72 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5_18XLARGE = "c5.18xlarge"; /** * C5D Compute-optimized Large * <ul> * <li>4 GiB of memory</li> * <li>2 vCPU</li> * <li>1 x 50 NVMe SSD</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5D_LARGE = "c5d.large"; /** * C5D Compute-optimized X Large * <ul> * <li>8 GiB of memory</li> * <li>4 vCPU</li> * <li>1 x 100 NVMe SSD</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5D_XLARGE = "c5d.xlarge"; /** * C5D 
Compute-optimized 2x Large * <ul> * <li>16 GiB of memory</li> * <li>8 vCPU</li> * <li>1 x 225 NVMe SSD</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5D_2XLARGE = "c5d.2xlarge"; /** * C5D Compute-optimized 4x Large * <ul> * <li>32 GiB of memory</li> * <li>16 vCPU</li> * <li>1 x 450 NVMe SSD</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5D_4XLARGE = "c5d.4xlarge"; /** * C5D Compute-optimized 9x Large * <ul> * <li>72 GiB of memory</li> * <li>36 vCPU</li> * <li>1 x 900 NVMe SSD</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5D_9XLARGE = "c5d.9xlarge"; /** * C5D Compute-optimized 18x Large * <ul> * <li>144 GiB of memory</li> * <li>72 vCPU</li> * <li>2 x 900 NVMe SSD</li> * <li>Up to 25 Gbps of network bandwidth using Elastic Network Adapter (ENA)-based Enhanced Networking</li> * </ul> */ public static final String C5D_18XLARGE = "c5d.18xlarge"; /** * D2 Dense Storage Extra Large * <ul> * <li>30.5 GiB of memory</li> * <li>4 vCPU</li> * <li>3 x 2000gb HDD</li> * </ul> */ public static final String D2_XLARGE = "d2.xlarge"; /** * D2 Dense Storage Double Extra Large * <ul> * <li>61 GiB of memory</li> * <li>8 vCPU</li> * <li>6 x 2000gb HDD</li> * </ul> */ public static final String D2_2XLARGE = "d2.2xlarge"; /** * D2 Dense Storage Quadruple Extra Large * <ul> * <li>122 GiB of memory</li> * <li>16 vCPU</li> * <li>12 x 2000gb HDD</li> * </ul> */ public static final String D2_4XLARGE = "d2.4xlarge"; /** * D2 Dense Storage Octuple Extra Large * <ul> * <li>244 GiB of memory</li> * <li>36 vCPU</li> * <li>24 x 2000gb HDD</li> * </ul> */ public static final String D2_8XLARGE = "d2.8xlarge"; /** * I2 Extra Large * <ul> * <li>30.5 GiB of memory</li> * <li>14 EC2 Compute 
Units (4 virtual cores)</li> * <li>1 SSD-based volume with 800 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String I2_XLARGE = "i2.xlarge"; /** * I2 Double Extra Large * <ul> * <li>61 GiB of memory</li> * <li>27 EC2 Compute Units (8 virtual cores)</li> * <li>2 SSD-based volumes each with 800 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String I2_2XLARGE = "i2.2xlarge"; /** * I2 Quadruple Extra Large * <ul> * <li>122 GiB of memory</li> * <li>53 EC2 Compute Units (16 virtual cores)</li> * <li>4 SSD-based volumes each with 800 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String I2_4XLARGE = "i2.4xlarge"; /** * I2 Octuple Extra Large * <ul> * <li>244 GiB of memory</li> * <li>104 EC2 Compute Units (32 virtual cores)</li> * <li>8 SSD-based volumes each with 800 GiB of instance storage</li> * <li>64-bit platform</li> * <li>I/O Performance: High</li> * </ul> */ public static final String I2_8XLARGE = "i2.8xlarge"; /** * R3 Large Memory Optimized * <ul> * <li>15.25 GB memory</li> * <li>2 vCPU</li> * <li>1 SSD-based volume with 32 GiB of instance storage</li> * <li>64-bit platform</li> * </ul> */ public static final String R3_LARGE = "r3.large"; /** * R3 Extra Large Memory Optimized * <ul> * <li>30.5 GB memory</li> * <li>4 vCPU</li> * <li>1 SSD-based volume with 80 GiB of instance storage</li> * <li>64-bit platform</li> * </ul> */ public static final String R3_XLARGE = "r3.xlarge"; /** * R3 Double Extra Large Memory Optimized * <ul> * <li>61 GB memory</li> * <li>8 vCPU</li> * <li>1 SSD-based volume with 160 GiB of instance storage</li> * <li>64-bit platform</li> * <li>Network Performance: High</li> * </ul> */ public static final String R3_2XLARGE = "r3.2xlarge"; /** * R3 Quadruple Extra Large Memory Optimized * <ul> * <li>122 GB memory</li> * <li>16 
vCPU</li> * <li>1 SSD-based volume with 320 GiB of instance storage</li> * <li>64-bit platform</li> * <li>Network Performance: High</li> * </ul> */ public static final String R3_4XLARGE = "r3.4xlarge"; /** * R3 Octuple Extra Large Memory Optimized * <ul> * <li>244 GB memory</li> * <li>32 vCPU</li> * <li>2 SSD-based volumes with 320 GiB of instance storage</li> * <li>64-bit platform</li> * <li>Network Performance: 10 Gigabit</li> * </ul> */ public static final String R3_8XLARGE = "r3.8xlarge"; /** * R4 Large Memory Optimized * <ul> * <li>15.25 GB memory</li> * <li>2 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String R4_LARGE = "r4.large"; /** * R4 Extra Large Memory Optimized * <ul> * <li>30.5 GB memory</li> * <li>4 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String R4_XLARGE = "r4.xlarge"; /** * R4 Double Extra Large Memory Optimized * <ul> * <li>61 GB memory</li> * <li>8 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String R4_2XLARGE = "r4.2xlarge"; /** * R4 Quadruple Extra Large Memory Optimized * <ul> * <li>122 GB memory</li> * <li>16 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String R4_4XLARGE = "r4.4xlarge"; /** * R4 Octuple Extra Large Memory Optimized * <ul> * <li>244 GB memory</li> * <li>32 vCPU</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: 10 Gigabit</li> * </ul> */ public static final String R4_8XLARGE = "r4.8xlarge"; /** * R4 16x Extra Large Memory Optimized * <ul> * <li>488 GB memory</li> * <li>64 vCPU</li> * 
<li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: 25 Gigabit</li> * </ul> */ public static final String R4_16XLARGE = "r4.16xlarge"; /** * X1 16x Large-scale, enterprise-class and in-memory applications * <ul> * <li>976 GB memory</li> * <li>64 vCPU</li> * <li>Intel Xeon E7-8880 v3 (Haswell) processors</li> * <li>1 SSD-based volume with 1920 GiB of instance storage</li> * <li>64-bit platform</li> * <li>Network Performance: 25 Gigabit</li> * </ul> */ public static final String X1_16XLARGE = "x1.16xlarge"; /** * X1 32x Large-scale, enterprise-class and in-memory applications * <ul> * <li>1952 GB memory</li> * <li>128 vCPU</li> * <li>Intel Xeon E7-8880 v3 (Haswell) processors</li> * <li>2 SSD-based volume with 1920 GiB of instance storage</li> * <li>64-bit platform</li> * <li>Network Performance: 25 Gigabit</li> * </ul> */ public static final String X1_32XLARGE = "x1.32xlarge"; /** * A1 Medium * <ul> * <li>Provider Id: a1.medium</li> * <li>vCPU: 1</li> * <li>Memory (GiB): 2</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String A1_MEDIUM = "a1.medium"; /** * A1 Large * <ul> * <li>Provider Id: a1.large</li> * <li>vCPU: 2</li> * <li>Memory (GiB): 4</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String A1_LARGE = "a1.large"; /** * A1 XLarge * <ul> * <li>Provider Id: a1.xlarge</li> * <li>vCPU: 4</li> * <li>Memory (GiB): 8</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String A1_XLARGE = "a1.xlarge"; /** * A1 2XLarge * <ul> * <li>Provider Id: a1.2xlarge</li> * <li>vCPU: 8</li> * <li>Memory (GiB): 16</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * 
<li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String A1_2XLARGE = "a1.2xlarge"; /** * A1 4XLarge * <ul> * <li>Provider Id: a1.4xlarge</li> * <li>vCPU: 16</li> * <li>Memory (GiB): 32</li> * <li>No instance storage (EBS storage only)</li> * <li>64-bit platform</li> * <li>Network Performance: up to 10 Gigabit</li> * </ul> */ public static final String A1_4XLARGE = "a1.4xlarge"; }
googleapis/google-cloud-java
35,173
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PreTunedModel.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/tuning_job.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * A pre-tuned model for continuous tuning. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.PreTunedModel} */ public final class PreTunedModel extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.PreTunedModel) PreTunedModelOrBuilder { private static final long serialVersionUID = 0L; // Use PreTunedModel.newBuilder() to construct. 
private PreTunedModel(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PreTunedModel() { tunedModelName_ = ""; checkpointId_ = ""; baseModel_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PreTunedModel(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.TuningJobProto .internal_static_google_cloud_aiplatform_v1beta1_PreTunedModel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.TuningJobProto .internal_static_google_cloud_aiplatform_v1beta1_PreTunedModel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PreTunedModel.class, com.google.cloud.aiplatform.v1beta1.PreTunedModel.Builder.class); } public static final int TUNED_MODEL_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object tunedModelName_ = ""; /** * * * <pre> * The resource name of the Model. * E.g., a model resource name with a specified version id or alias: * * `projects/{project}/locations/{location}/models/{model}&#64;{version_id}` * * `projects/{project}/locations/{location}/models/{model}&#64;{alias}` * * Or, omit the version id to use the default version: * * `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code>string tuned_model_name = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The tunedModelName. 
*/ @java.lang.Override public java.lang.String getTunedModelName() { java.lang.Object ref = tunedModelName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tunedModelName_ = s; return s; } } /** * * * <pre> * The resource name of the Model. * E.g., a model resource name with a specified version id or alias: * * `projects/{project}/locations/{location}/models/{model}&#64;{version_id}` * * `projects/{project}/locations/{location}/models/{model}&#64;{alias}` * * Or, omit the version id to use the default version: * * `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code>string tuned_model_name = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for tunedModelName. */ @java.lang.Override public com.google.protobuf.ByteString getTunedModelNameBytes() { java.lang.Object ref = tunedModelName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tunedModelName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CHECKPOINT_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object checkpointId_ = ""; /** * * * <pre> * Optional. The source checkpoint id. If not specified, the default * checkpoint will be used. * </pre> * * <code>string checkpoint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The checkpointId. */ @java.lang.Override public java.lang.String getCheckpointId() { java.lang.Object ref = checkpointId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); checkpointId_ = s; return s; } } /** * * * <pre> * Optional. The source checkpoint id. 
If not specified, the default * checkpoint will be used. * </pre> * * <code>string checkpoint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for checkpointId. */ @java.lang.Override public com.google.protobuf.ByteString getCheckpointIdBytes() { java.lang.Object ref = checkpointId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); checkpointId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int BASE_MODEL_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object baseModel_ = ""; /** * * * <pre> * Output only. The name of the base model this * [PreTunedModel][google.cloud.aiplatform.v1beta1.PreTunedModel] was tuned * from. * </pre> * * <code>string base_model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The baseModel. */ @java.lang.Override public java.lang.String getBaseModel() { java.lang.Object ref = baseModel_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); baseModel_ = s; return s; } } /** * * * <pre> * Output only. The name of the base model this * [PreTunedModel][google.cloud.aiplatform.v1beta1.PreTunedModel] was tuned * from. * </pre> * * <code>string base_model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for baseModel. 
*/ @java.lang.Override public com.google.protobuf.ByteString getBaseModelBytes() { java.lang.Object ref = baseModel_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); baseModel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tunedModelName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tunedModelName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(checkpointId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, checkpointId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseModel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, baseModel_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tunedModelName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tunedModelName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(checkpointId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, checkpointId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseModel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, baseModel_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) 
{ return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.PreTunedModel)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.PreTunedModel other = (com.google.cloud.aiplatform.v1beta1.PreTunedModel) obj; if (!getTunedModelName().equals(other.getTunedModelName())) return false; if (!getCheckpointId().equals(other.getCheckpointId())) return false; if (!getBaseModel().equals(other.getBaseModel())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TUNED_MODEL_NAME_FIELD_NUMBER; hash = (53 * hash) + getTunedModelName().hashCode(); hash = (37 * hash) + CHECKPOINT_ID_FIELD_NUMBER; hash = (53 * hash) + getCheckpointId().hashCode(); hash = (37 * hash) + BASE_MODEL_FIELD_NUMBER; hash = (53 * hash) + getBaseModel().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.PreTunedModel prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A pre-tuned model for continuous tuning. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.PreTunedModel} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.PreTunedModel) com.google.cloud.aiplatform.v1beta1.PreTunedModelOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.TuningJobProto .internal_static_google_cloud_aiplatform_v1beta1_PreTunedModel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.TuningJobProto .internal_static_google_cloud_aiplatform_v1beta1_PreTunedModel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PreTunedModel.class, com.google.cloud.aiplatform.v1beta1.PreTunedModel.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.PreTunedModel.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder 
clear() { super.clear(); bitField0_ = 0; tunedModelName_ = ""; checkpointId_ = ""; baseModel_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.TuningJobProto .internal_static_google_cloud_aiplatform_v1beta1_PreTunedModel_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PreTunedModel getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.PreTunedModel.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PreTunedModel build() { com.google.cloud.aiplatform.v1beta1.PreTunedModel result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PreTunedModel buildPartial() { com.google.cloud.aiplatform.v1beta1.PreTunedModel result = new com.google.cloud.aiplatform.v1beta1.PreTunedModel(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.PreTunedModel result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.tunedModelName_ = tunedModelName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.checkpointId_ = checkpointId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.baseModel_ = baseModel_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public 
Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.PreTunedModel) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.PreTunedModel) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.PreTunedModel other) { if (other == com.google.cloud.aiplatform.v1beta1.PreTunedModel.getDefaultInstance()) return this; if (!other.getTunedModelName().isEmpty()) { tunedModelName_ = other.tunedModelName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getCheckpointId().isEmpty()) { checkpointId_ = other.checkpointId_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getBaseModel().isEmpty()) { baseModel_ = other.baseModel_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { tunedModelName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { checkpointId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { baseModel_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; 
} // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object tunedModelName_ = ""; /** * * * <pre> * The resource name of the Model. * E.g., a model resource name with a specified version id or alias: * * `projects/{project}/locations/{location}/models/{model}&#64;{version_id}` * * `projects/{project}/locations/{location}/models/{model}&#64;{alias}` * * Or, omit the version id to use the default version: * * `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code>string tuned_model_name = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The tunedModelName. */ public java.lang.String getTunedModelName() { java.lang.Object ref = tunedModelName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tunedModelName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The resource name of the Model. * E.g., a model resource name with a specified version id or alias: * * `projects/{project}/locations/{location}/models/{model}&#64;{version_id}` * * `projects/{project}/locations/{location}/models/{model}&#64;{alias}` * * Or, omit the version id to use the default version: * * `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code>string tuned_model_name = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for tunedModelName. 
*/ public com.google.protobuf.ByteString getTunedModelNameBytes() { java.lang.Object ref = tunedModelName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tunedModelName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The resource name of the Model. * E.g., a model resource name with a specified version id or alias: * * `projects/{project}/locations/{location}/models/{model}&#64;{version_id}` * * `projects/{project}/locations/{location}/models/{model}&#64;{alias}` * * Or, omit the version id to use the default version: * * `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code>string tuned_model_name = 1 [(.google.api.resource_reference) = { ... }</code> * * @param value The tunedModelName to set. * @return This builder for chaining. */ public Builder setTunedModelName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } tunedModelName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The resource name of the Model. * E.g., a model resource name with a specified version id or alias: * * `projects/{project}/locations/{location}/models/{model}&#64;{version_id}` * * `projects/{project}/locations/{location}/models/{model}&#64;{alias}` * * Or, omit the version id to use the default version: * * `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code>string tuned_model_name = 1 [(.google.api.resource_reference) = { ... }</code> * * @return This builder for chaining. */ public Builder clearTunedModelName() { tunedModelName_ = getDefaultInstance().getTunedModelName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The resource name of the Model. 
* E.g., a model resource name with a specified version id or alias: * * `projects/{project}/locations/{location}/models/{model}&#64;{version_id}` * * `projects/{project}/locations/{location}/models/{model}&#64;{alias}` * * Or, omit the version id to use the default version: * * `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code>string tuned_model_name = 1 [(.google.api.resource_reference) = { ... }</code> * * @param value The bytes for tunedModelName to set. * @return This builder for chaining. */ public Builder setTunedModelNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); tunedModelName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object checkpointId_ = ""; /** * * * <pre> * Optional. The source checkpoint id. If not specified, the default * checkpoint will be used. * </pre> * * <code>string checkpoint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The checkpointId. */ public java.lang.String getCheckpointId() { java.lang.Object ref = checkpointId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); checkpointId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The source checkpoint id. If not specified, the default * checkpoint will be used. * </pre> * * <code>string checkpoint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for checkpointId. */ public com.google.protobuf.ByteString getCheckpointIdBytes() { java.lang.Object ref = checkpointId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); checkpointId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The source checkpoint id. 
If not specified, the default * checkpoint will be used. * </pre> * * <code>string checkpoint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The checkpointId to set. * @return This builder for chaining. */ public Builder setCheckpointId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } checkpointId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The source checkpoint id. If not specified, the default * checkpoint will be used. * </pre> * * <code>string checkpoint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearCheckpointId() { checkpointId_ = getDefaultInstance().getCheckpointId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. The source checkpoint id. If not specified, the default * checkpoint will be used. * </pre> * * <code>string checkpoint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for checkpointId to set. * @return This builder for chaining. */ public Builder setCheckpointIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); checkpointId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object baseModel_ = ""; /** * * * <pre> * Output only. The name of the base model this * [PreTunedModel][google.cloud.aiplatform.v1beta1.PreTunedModel] was tuned * from. * </pre> * * <code>string base_model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The baseModel. 
*/ public java.lang.String getBaseModel() { java.lang.Object ref = baseModel_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); baseModel_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The name of the base model this * [PreTunedModel][google.cloud.aiplatform.v1beta1.PreTunedModel] was tuned * from. * </pre> * * <code>string base_model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for baseModel. */ public com.google.protobuf.ByteString getBaseModelBytes() { java.lang.Object ref = baseModel_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); baseModel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The name of the base model this * [PreTunedModel][google.cloud.aiplatform.v1beta1.PreTunedModel] was tuned * from. * </pre> * * <code>string base_model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The baseModel to set. * @return This builder for chaining. */ public Builder setBaseModel(java.lang.String value) { if (value == null) { throw new NullPointerException(); } baseModel_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. The name of the base model this * [PreTunedModel][google.cloud.aiplatform.v1beta1.PreTunedModel] was tuned * from. * </pre> * * <code>string base_model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearBaseModel() { baseModel_ = getDefaultInstance().getBaseModel(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Output only. The name of the base model this * [PreTunedModel][google.cloud.aiplatform.v1beta1.PreTunedModel] was tuned * from. 
* </pre> * * <code>string base_model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for baseModel to set. * @return This builder for chaining. */ public Builder setBaseModelBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); baseModel_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.PreTunedModel) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.PreTunedModel) private static final com.google.cloud.aiplatform.v1beta1.PreTunedModel DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.PreTunedModel(); } public static com.google.cloud.aiplatform.v1beta1.PreTunedModel getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PreTunedModel> PARSER = new com.google.protobuf.AbstractParser<PreTunedModel>() { @java.lang.Override public PreTunedModel parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PreTunedModel> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PreTunedModel> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PreTunedModel getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/helix
35,369
helix-core/src/test/java/org/apache/helix/integration/task/TestAddDeleteTask.java
package org.apache.helix.integration.task;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.helix.HelixException;
import org.apache.helix.TestHelper;
import org.apache.helix.integration.manager.ClusterControllerManager;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.MasterSlaveSMD;
import org.apache.helix.task.JobConfig;
import org.apache.helix.task.JobContext;
import org.apache.helix.task.TaskConfig;
import org.apache.helix.task.TaskPartitionState;
import org.apache.helix.task.TaskState;
import org.apache.helix.task.TaskUtil;
import org.apache.helix.task.Workflow;
import org.apache.helix.task.WorkflowConfig;
import org.apache.helix.task.WorkflowContext;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
 * Integration tests for dynamically adding and deleting individual tasks in a running
 * (or not-yet-started) generic job via {@code TaskDriver.addTask} / {@code TaskDriver.deleteTask}.
 * Covers validation failures (missing workflow/job, targeted jobs, conflicting commands,
 * null/duplicate tasks), the happy paths, and interactions with job/workflow lifecycle.
 * The tests form a single dependency chain via {@code dependsOnMethods}.
 */
public class TestAddDeleteTask extends TaskTestBase {
  private static final String DATABASE = "TestDB_" + TestHelper.getTestClassName();

  @BeforeClass
  public void beforeClass() throws Exception {
    _numNodes = 3;
    super.beforeClass();
  }

  @AfterClass
  public void afterClass() throws Exception {
    super.afterClass();
  }

  /**
   * Returns true once both the workflow config and the workflow context exist.
   * Safe to poll before the controller has processed the workflow.
   */
  private boolean workflowConfigAndContextExist(String workflowName) {
    WorkflowConfig config = _driver.getWorkflowConfig(workflowName);
    WorkflowContext context = _driver.getWorkflowContext(workflowName);
    return config != null && context != null;
  }

  /**
   * Returns true once the given partition of the job has reached the expected task state.
   * Null-safe: returns false while the job context or the partition state does not exist yet.
   */
  private boolean isPartitionInState(String workflowName, String jobName, int partitionId,
      TaskPartitionState expected) {
    JobContext jobContext =
        _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
    if (jobContext == null) {
      return false;
    }
    return jobContext.getPartitionState(partitionId) == expected;
  }

  /**
   * Returns the number of partitions recorded in the job context, or -1 if the context
   * has not been created yet.
   */
  private int partitionCount(String workflowName, String jobName) {
    JobContext jobContext =
        _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
    return jobContext == null ? -1 : jobContext.getPartitionSet().size();
  }

  @Test
  public void testAddDeleteTaskWorkflowMissing() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    TaskConfig task = new TaskConfig(null, null, null, null);
    try {
      _driver.addTask(workflowName, jobName, task);
      Assert.fail("Exception is expected because workflow config is missing");
    } catch (IllegalArgumentException e) {
      // Expected: the workflow config does not exist.
    }

    try {
      _driver.deleteTask(workflowName, jobName, task.getId());
      Assert.fail("Exception is expected because workflow config is missing");
    } catch (IllegalArgumentException e) {
      // Expected: the workflow config does not exist.
    }
  }

  @Test(dependsOnMethods = "testAddDeleteTaskWorkflowMissing")
  public void testAddDeleteTaskJobMissing() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    // Start a workflow that does not contain the job we are going to reference.
    Workflow.Builder workflowBuilder1 = new Workflow.Builder(workflowName);
    _driver.start(workflowBuilder1.build());

    // Make sure workflow config and context have been created.
    Assert.assertTrue(TestHelper.verify(() -> workflowConfigAndContextExist(workflowName),
        TestHelper.WAIT_DURATION));

    TaskConfig task = new TaskConfig(null, null, null, null);
    try {
      _driver.addTask(workflowName, jobName, task);
      Assert.fail("Exception is expected because job config is missing");
    } catch (IllegalArgumentException e) {
      // Expected: the job config does not exist.
    }

    try {
      _driver.deleteTask(workflowName, jobName, task.getId());
      Assert.fail("Exception is expected because job config is missing");
    } catch (IllegalArgumentException e) {
      // Expected: the job config does not exist.
    }
  }

  @Test(dependsOnMethods = "testAddDeleteTaskJobMissing")
  public void testAddTaskToTargetedJob() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    // Targeted job: tasks are derived from the target resource's partitions, so ad-hoc
    // task addition must be rejected.
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Sets.newHashSet("MASTER")).setNumConcurrentTasksPerInstance(100)
        .setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    // Make sure workflow config and context have been created.
    Assert.assertTrue(TestHelper.verify(() -> workflowConfigAndContextExist(workflowName),
        TestHelper.WAIT_DURATION));

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    TaskConfig task = new TaskConfig(null, null, null, null);
    try {
      _driver.addTask(workflowName, jobName, task);
      Assert.fail("Exception is expected because job is targeted");
    } catch (HelixException e) {
      // Expected: tasks cannot be added to a targeted job.
    }
    _driver.stop(workflowName);
  }

  @Test(dependsOnMethods = "testAddTaskToTargetedJob")
  public void testAddTaskJobAndTaskCommand() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Make sure workflow config and context have been created.
    Assert.assertTrue(TestHelper.verify(() -> workflowConfigAndContextExist(workflowName),
        TestHelper.WAIT_DURATION));

    // The job already defines a command, so a task carrying its own command must be rejected.
    TaskConfig task = new TaskConfig("dummy", null, null, null);
    try {
      _driver.addTask(workflowName, jobName, task);
      Assert.fail("Exception is expected because job and task both have command field");
    } catch (HelixException e) {
      // Expected: job config and new task both define a command.
    }
    _driver.stop(workflowName);
  }

  @Test(dependsOnMethods = "testAddTaskJobAndTaskCommand")
  public void testAddTaskJobNotRunning() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    // Short-lived job: it completes quickly so the addTask call below arrives too late.
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    // Make sure workflow config and context have been created.
    Assert.assertTrue(TestHelper.verify(() -> workflowConfigAndContextExist(workflowName),
        TestHelper.WAIT_DURATION));

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.COMPLETED);

    TaskConfig task = new TaskConfig(null, null, null, null);
    try {
      _driver.addTask(workflowName, jobName, task);
      Assert.fail("Exception is expected because job is not running");
    } catch (HelixException e) {
      // Expected: the job is not running.
    }
  }

  @Test(dependsOnMethods = "testAddTaskJobNotRunning")
  public void testAddTaskWithNullConfig() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    // Make sure workflow config and context have been created.
    Assert.assertTrue(TestHelper.verify(() -> workflowConfigAndContextExist(workflowName),
        TestHelper.WAIT_DURATION));

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    try {
      _driver.addTask(workflowName, jobName, null);
      Assert.fail("Exception is expected because task config is null");
    } catch (IllegalArgumentException e) {
      // Expected: the task config is null.
    }
    _driver.stop(workflowName);
  }

  @Test(dependsOnMethods = "testAddTaskWithNullConfig")
  public void testAddTaskSuccessfully() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Add a short-running task; it becomes partition 1 of the job.
    Map<String, String> newTaskConfig =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));
    TaskConfig task = new TaskConfig(null, newTaskConfig, null, null);
    _driver.addTask(workflowName, jobName, task);

    // Null-safe poll: the original inline lambda dereferenced the job context before its
    // null check, which could throw NPE instead of returning false.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 1, TaskPartitionState.COMPLETED),
        TestHelper.WAIT_DURATION));
    _driver.stop(workflowName);
  }

  @Test(dependsOnMethods = "testAddTaskSuccessfully")
  public void testAddTaskTwice() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Add a short-running task once; the second identical add must be rejected.
    Map<String, String> newTaskConfig =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));
    TaskConfig task = new TaskConfig(null, newTaskConfig, null, null);
    _driver.addTask(workflowName, jobName, task);

    try {
      _driver.addTask(workflowName, jobName, task);
      Assert.fail("Exception is expected because task is being added multiple times");
    } catch (HelixException e) {
      // Expected: the same task cannot be added twice.
    }

    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 1, TaskPartitionState.COMPLETED),
        TestHelper.WAIT_DURATION));
    _driver.stop(workflowName);
  }

  @Test(dependsOnMethods = "testAddTaskTwice")
  public void testAddTaskToJobNotStarted() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    // Execution delay keeps the job from starting immediately, so the task is added
    // before the job context exists.
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setExecutionDelay(5000L).setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100)
        .setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    // Workflow context exists but the job has not started yet.
    Assert.assertTrue(TestHelper.verify(() -> {
      WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
      JobContext jobContext =
          _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
      return (workflowContext != null && jobContext == null);
    }, TestHelper.WAIT_DURATION));

    // Add a short-running task while the job is still pending.
    Map<String, String> newTaskConfig =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));
    TaskConfig task = new TaskConfig(null, newTaskConfig, null, null);
    _driver.addTask(workflowName, jobName, task);

    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 1, TaskPartitionState.COMPLETED),
        TestHelper.WAIT_DURATION));
    _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
  }

  @Test(dependsOnMethods = "testAddTaskToJobNotStarted")
  public void testAddTaskWorkflowAndJobNotStarted() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    // Stop the controller so neither the workflow nor the job can start.
    _controller.syncStop();
    _driver.start(workflowBuilder1.build());

    Assert.assertTrue(TestHelper.verify(() -> {
      WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
      JobContext jobContext =
          _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
      return (workflowContext == null && jobContext == null);
    }, TestHelper.WAIT_DURATION));

    // Add a short-running task while nothing is scheduled yet.
    Map<String, String> newTaskConfig =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));
    TaskConfig task = new TaskConfig(null, newTaskConfig, null, null);
    _driver.addTask(workflowName, jobName, task);

    // Restart the controller; the workflow (including the added task) should now complete.
    String controllerName = CONTROLLER_PREFIX + "_0";
    _controller = new ClusterControllerManager(ZK_ADDR, CLUSTER_NAME, controllerName);
    _controller.syncStart();

    _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
  }

  @Test(dependsOnMethods = "testAddTaskWorkflowAndJobNotStarted")
  public void testDeleteNonExistedTask() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "9999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    String dummyID = "1234";
    try {
      _driver.deleteTask(workflowName, jobName, dummyID);
      Assert.fail("Exception is expected because a task with such ID does not exists!");
    } catch (IllegalArgumentException e) {
      // Expected: no task with this ID exists.
    }
    _driver.waitToStop(workflowName, TestHelper.WAIT_DURATION);
  }

  @Test(dependsOnMethods = "testDeleteNonExistedTask")
  public void testDeleteTaskFromJobNotStarted() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    // Very long execution delay keeps the job from ever starting during this test.
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setExecutionDelay(500000L).setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100)
        .setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    Assert.assertTrue(TestHelper.verify(() -> {
      WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
      JobContext jobContext =
          _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
      return (workflowContext != null && jobContext == null);
    }, TestHelper.WAIT_DURATION));

    // Add a short-running task to the pending job.
    Map<String, String> newTaskConfig =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));
    TaskConfig task = new TaskConfig(null, newTaskConfig, null, null);
    _driver.addTask(workflowName, jobName, task);

    JobConfig jobConfig =
        _driver.getJobConfig(TaskUtil.getNamespacedJobName(workflowName, jobName));
    // Make sure the task has been added to the job config.
    Assert.assertTrue(jobConfig.getMapConfigs().containsKey(task.getId()));

    _driver.deleteTask(workflowName, jobName, task.getId());
    jobConfig = _driver.getJobConfig(TaskUtil.getNamespacedJobName(workflowName, jobName));
    // Make sure the task has been removed from the job config.
    Assert.assertFalse(jobConfig.getMapConfigs().containsKey(task.getId()));
    _driver.deleteAndWaitForCompletion(workflowName, TestHelper.WAIT_DURATION);
  }

  @Test(dependsOnMethods = "testDeleteTaskFromJobNotStarted")
  public void testAddAndDeleteTask() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Wait until the initial task (partition 0) is RUNNING.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 0, TaskPartitionState.RUNNING),
        TestHelper.WAIT_DURATION));

    // Add a new long-running task; it becomes partition 1.
    Map<String, String> newTaskConfig =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    TaskConfig task = new TaskConfig(null, newTaskConfig, null, null);
    _driver.addTask(workflowName, jobName, task);

    // Wait until the new task is RUNNING.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 1, TaskPartitionState.RUNNING),
        TestHelper.WAIT_DURATION));

    _driver.deleteTask(workflowName, jobName, task.getId());
    JobConfig jobConfig =
        _driver.getJobConfig(TaskUtil.getNamespacedJobName(workflowName, jobName));
    // Make sure the task has been removed from the job config.
    Assert.assertFalse(jobConfig.getMapConfigs().containsKey(task.getId()));

    // The deleted task's partition should disappear from the job context as well.
    Assert.assertTrue(TestHelper.verify(() -> {
      JobContext jobContext =
          _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
      if (jobContext == null) {
        return false;
      }
      return (!jobContext.getPartitionSet().contains(1));
    }, TestHelper.WAIT_DURATION));
    _driver.stop(workflowName);
  }

  @Test(dependsOnMethods = "testAddAndDeleteTask")
  public void testDeleteTaskAndJobCompleted() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "20000"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Wait until the initial task (partition 0) is RUNNING.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 0, TaskPartitionState.RUNNING),
        TestHelper.WAIT_DURATION));

    // Add two new long-running tasks (partitions 1 and 2).
    Map<String, String> taskConfig1 =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    Map<String, String> taskConfig2 =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    TaskConfig task1 = new TaskConfig(null, taskConfig1, null, null);
    TaskConfig task2 = new TaskConfig(null, taskConfig2, null, null);
    _driver.addTask(workflowName, jobName, task1);
    _driver.addTask(workflowName, jobName, task2);

    // Wait until both new tasks are RUNNING.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 1, TaskPartitionState.RUNNING)
            && isPartitionInState(workflowName, jobName, 2, TaskPartitionState.RUNNING),
        TestHelper.WAIT_DURATION));

    _driver.deleteTask(workflowName, jobName, task1.getId());
    _driver.deleteTask(workflowName, jobName, task2.getId());
    JobConfig jobConfig =
        _driver.getJobConfig(TaskUtil.getNamespacedJobName(workflowName, jobName));
    // Make sure both tasks have been removed from the job config.
    Assert.assertFalse(jobConfig.getMapConfigs().containsKey(task1.getId()));
    Assert.assertFalse(jobConfig.getMapConfigs().containsKey(task2.getId()));

    Assert.assertTrue(TestHelper.verify(() -> {
      JobContext jobContext =
          _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
      if (jobContext == null) {
        return false;
      }
      return (!jobContext.getPartitionSet().contains(1)
          && !jobContext.getPartitionSet().contains(2));
    }, TestHelper.WAIT_DURATION));

    // With only the initial (finite-delay) task left, the job and workflow should finish.
    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.COMPLETED);
    _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
  }

  @Test(dependsOnMethods = "testDeleteTaskAndJobCompleted")
  public void testDeleteMiddleTaskAndAdd() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "20000"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Wait until the initial task (partition 0) is RUNNING.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 0, TaskPartitionState.RUNNING),
        TestHelper.WAIT_DURATION));

    // Only one task (the initial task) should be included in the job.
    Assert.assertTrue(TestHelper.verify(() -> partitionCount(workflowName, jobName) == 1,
        TestHelper.WAIT_DURATION));

    // Add four new long-running tasks.
    Map<String, String> taskConfig1 =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    Map<String, String> taskConfig2 =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    Map<String, String> taskConfig3 =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    Map<String, String> taskConfig4 =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    TaskConfig task1 = new TaskConfig(null, taskConfig1, null, null);
    TaskConfig task2 = new TaskConfig(null, taskConfig2, null, null);
    TaskConfig task3 = new TaskConfig(null, taskConfig3, null, null);
    TaskConfig task4 = new TaskConfig(null, taskConfig4, null, null);
    _driver.addTask(workflowName, jobName, task1);
    _driver.addTask(workflowName, jobName, task2);
    _driver.addTask(workflowName, jobName, task3);
    _driver.addTask(workflowName, jobName, task4);

    // Five tasks should now be included in the job.
    Assert.assertTrue(TestHelper.verify(() -> partitionCount(workflowName, jobName) == 5,
        TestHelper.WAIT_DURATION));

    // All tasks should be RUNNING.
    Assert.assertTrue(TestHelper.verify(() -> {
      JobContext jobContext =
          _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName));
      if (jobContext == null) {
        return false;
      }
      int runningTasks = 0;
      for (Integer pId : jobContext.getPartitionSet()) {
        if (jobContext.getPartitionState(pId) == TaskPartitionState.RUNNING) {
          runningTasks++;
        }
      }
      return (runningTasks == 5);
    }, TestHelper.WAIT_DURATION));

    // Delete a task from the middle of the partition range.
    _driver.deleteTask(workflowName, jobName, task3.getId());

    // Since one of the tasks has been deleted, we should expect 4 tasks in the context.
    Assert.assertTrue(TestHelper.verify(() -> partitionCount(workflowName, jobName) == 4,
        TestHelper.WAIT_DURATION));

    // Add a new task and make sure it is added to the context.
    Map<String, String> taskConfig5 =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    TaskConfig task5 = new TaskConfig(null, taskConfig5, null, null);
    _driver.addTask(workflowName, jobName, task5);

    Assert.assertTrue(TestHelper.verify(() -> partitionCount(workflowName, jobName) == 5,
        TestHelper.WAIT_DURATION));
    _driver.stop(workflowName);
  }

  @Test(dependsOnMethods = "testDeleteTaskAndJobCompleted")
  public void testPartitionDropTargetedJob() throws Exception {
    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    // Create a 3-partition SEMI_AUTO resource for the targeted job.
    _gSetupTool.addResourceToCluster(CLUSTER_NAME, DATABASE, 3, MASTER_SLAVE_STATE_MODEL,
        IdealState.RebalanceMode.SEMI_AUTO.name());
    _gSetupTool.rebalanceResource(CLUSTER_NAME, DATABASE, 3);
    List<String> preferenceList = new ArrayList<>();
    preferenceList.add(PARTICIPANT_PREFIX + "_" + (_startPort + 0));
    preferenceList.add(PARTICIPANT_PREFIX + "_" + (_startPort + 1));
    preferenceList.add(PARTICIPANT_PREFIX + "_" + (_startPort + 2));
    IdealState idealState = new IdealState(DATABASE);
    idealState.setPreferenceList(DATABASE + "_0", preferenceList);
    idealState.setPreferenceList(DATABASE + "_1", preferenceList);
    idealState.setPreferenceList(DATABASE + "_2", preferenceList);
    _gSetupTool.getClusterManagementTool().updateIdealState(CLUSTER_NAME, DATABASE, idealState);

    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setTargetResource(DATABASE)
        .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(100).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "10000"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Wait until all three targeted tasks are RUNNING.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 0, TaskPartitionState.RUNNING)
            && isPartitionInState(workflowName, jobName, 1, TaskPartitionState.RUNNING)
            && isPartitionInState(workflowName, jobName, 2, TaskPartitionState.RUNNING),
        TestHelper.WAIT_DURATION));

    // Remove one partition from the ideal state; its task should be dropped from the context.
    idealState = new IdealState(DATABASE);
    idealState.setPreferenceList(DATABASE + "_1", preferenceList);
    _gSetupTool.getClusterManagementTool().removeFromIdealState(CLUSTER_NAME, DATABASE,
        idealState);

    Assert.assertTrue(TestHelper.verify(() -> partitionCount(workflowName, jobName) == 2,
        TestHelper.WAIT_DURATION));
    _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
  }

  @Test(dependsOnMethods = "testPartitionDropTargetedJob")
  public void testAddDeleteTaskOneInstance() throws Exception {
    // Stop all participants other than participant 0 so only one instance can run tasks.
    for (int i = 1; i < _numNodes; i++) {
      super.stopParticipant(i);
      Assert.assertFalse(_participants[i].isConnected());
    }

    String workflowName = TestHelper.getTestMethodName();
    String jobName = "JOB0";
    JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setWorkflow(workflowName)
        .setNumberOfTasks(1).setNumConcurrentTasksPerInstance(1).setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));

    Workflow.Builder workflowBuilder1 =
        new Workflow.Builder(workflowName).addJob(jobName, jobBuilder1);
    _driver.start(workflowBuilder1.build());

    _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, jobName),
        TaskState.IN_PROGRESS);

    // Wait until the initial task (partition 0) is RUNNING.
    Assert.assertTrue(TestHelper.verify(
        () -> isPartitionInState(workflowName, jobName, 0, TaskPartitionState.RUNNING),
        TestHelper.WAIT_DURATION));

    // Add a new long-running task.
    Map<String, String> newTaskConfig =
        new HashMap<>(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
    TaskConfig task = new TaskConfig(null, newTaskConfig, null, null);
    _driver.addTask(workflowName, jobName, task);
    Assert.assertEquals(
        _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName))
            .getPartitionSet().size(), 2);
    // Since only one concurrent task is allowed per instance, the new task should NOT be
    // scheduled (its partition has no state). [Fixed: comment previously said the opposite
    // of the assertion below.]
    Assert.assertNull(
        _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName))
            .getPartitionState(1));
    _driver.deleteTask(workflowName, jobName, task.getId());
    Assert.assertEquals(
        _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, jobName))
            .getPartitionSet().size(), 1);

    _driver.stop(workflowName);
  }
}
googleapis/google-cloud-java
34,066
java-gkehub/proto-google-cloud-gkehub-v1/src/main/java/com/google/cloud/gkehub/configmanagement/v1/ConfigManagementProto.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/gkehub/v1/configmanagement/configmanagement.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.gkehub.configmanagement.v1; public final class ConfigManagementProto { private ConfigManagementProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_MembershipState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_MembershipState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_MembershipSpec_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_MembershipSpec_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSync_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSync_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_GitConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_GitConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_OciConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_OciConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_PolicyController_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_PolicyController_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerDeploymentState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerDeploymentState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerVersion_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerVersion_fieldAccessorTable; static final 
com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_OperatorState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_OperatorState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_InstallError_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_InstallError_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncError_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncError_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncVersion_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncVersion_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncDeploymentState_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncDeploymentState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_SyncState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_SyncState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_SyncError_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_SyncError_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_ErrorResource_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_ErrorResource_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_GroupVersionKind_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_GroupVersionKind_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerVersion_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerVersion_fieldAccessorTable; static final 
com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gkehub_configmanagement_v1_GatekeeperDeploymentState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gkehub_configmanagement_v1_GatekeeperDeploymentState_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n" + ">google/cloud/gkehub/v1/configmanagement/configmanagement.proto\022\'google.cloud.g" + "kehub.configmanagement.v1\032\037google/protobuf/timestamp.proto\"\346\003\n" + "\017MembershipState\022\024\n" + "\014cluster_name\030\001 \001(\t\022P\n" + "\017membership_spec\030\002" + " \001(\01327.google.cloud.gkehub.configmanagement.v1.MembershipSpec\022N\n" + "\016operator_state\030\003" + " \001(\01326.google.cloud.gkehub.configmanagement.v1.OperatorState\022S\n" + "\021config_sync_state\030\004" + " \001(\01328.google.cloud.gkehub.configmanagement.v1.ConfigSyncState\022_\n" + "\027policy_controller_state\030\005 \001(\0132>.google.cloud.gkeh" + "ub.configmanagement.v1.PolicyControllerState\022e\n" + "\032hierarchy_controller_state\030\007 \001(\013" + "2A.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState\"\347\003\n" + "\016MembershipSpec\022H\n" + "\013config_sync\030\001 \001(\01323.google.clo" + "ud.gkehub.configmanagement.v1.ConfigSync\022T\n" + "\021policy_controller\030\002 \001(\01329.google.clo" + "ud.gkehub.configmanagement.v1.PolicyController\022`\n" + "\024hierarchy_controller\030\004 \001(\0132B.g" + "oogle.cloud.gkehub.configmanagement.v1.HierarchyControllerConfig\022\017\n" + "\007version\030\n" + " \001(\t\022\017\n" + "\007cluster\030\013 \001(\t\022V\n\n" + "management\030\014 \001(\0162B" + ".google.cloud.gkehub.configmanagement.v1.MembershipSpec.Management\"Y\n\n" + "Management\022\032\n" + 
"\026MANAGEMENT_UNSPECIFIED\020\000\022\030\n" + "\024MANAGEMENT_AUTOMATIC\020\001\022\025\n" + "\021MANAGEMENT_MANUAL\020\002\"\211\002\n\n" + "ConfigSync\022?\n" + "\003git\030\007" + " \001(\01322.google.cloud.gkehub.configmanagement.v1.GitConfig\022\025\n" + "\r" + "source_format\030\010 \001(\t\022\024\n" + "\007enabled\030\n" + " \001(\010H\000\210\001\001\022\025\n\r" + "prevent_drift\030\013 \001(\010\022?\n" + "\003oci\030\014 \001(\01322.google.cloud.gkehub.configmanagement.v1.OciConfig\022)\n" + "!metrics_gcp_service_account_email\030\017 \001(\tB\n\n" + "\010_enabled\"\276\001\n" + "\tGitConfig\022\021\n" + "\tsync_repo\030\001 \001(\t\022\023\n" + "\013sync_branch\030\002 \001(\t\022\022\n\n" + "policy_dir\030\003 \001(\t\022\026\n" + "\016sync_wait_secs\030\004 \001(\003\022\020\n" + "\010sync_rev\030\005 \001(\t\022\023\n" + "\013secret_type\030\006 \001(\t\022\023\n" + "\013https_proxy\030\007 \001(\t\022!\n" + "\031gcp_service_account_email\030\010 \001(\t\"\202\001\n" + "\tOciConfig\022\021\n" + "\tsync_repo\030\001 \001(\t\022\022\n\n" + "policy_dir\030\002 \001(\t\022\026\n" + "\016sync_wait_secs\030\003 \001(\003\022\023\n" + "\013secret_type\030\004 \001(\t\022!\n" + "\031gcp_service_account_email\030\005 \001(\t\"\211\002\n" + "\020PolicyController\022\017\n" + "\007enabled\030\001 \001(\010\022\'\n" + "\032template_library_installed\030\002 \001(\010H\000\210\001\001\022#\n" + "\026audit_interval_seconds\030\003 \001(\003H\001\210\001\001\022\035\n" + "\025exemptable_namespaces\030\004 \003(\t\022!\n" + "\031referential_rules_enabled\030\005 \001(\010\022\032\n" + "\022log_denies_enabled\030\006 \001(\010B\035\n" + "\033_template_library_installedB\031\n" + "\027_audit_interval_seconds\"x\n" + "\031HierarchyControllerConfig\022\017\n" + "\007enabled\030\001 \001(\010\022\036\n" + "\026enable_pod_tree_labels\030\002 \001(\010\022*\n" + "\"enable_hierarchical_resource_quota\030\003 \001(\010\"\270\001\n" + 
"\"HierarchyControllerDeploymentState\022E\n" + "\003hnc\030\001 \001(\01628.goog" + "le.cloud.gkehub.configmanagement.v1.DeploymentState\022K\n" + "\textension\030\002 \001(\01628.google." + "cloud.gkehub.configmanagement.v1.DeploymentState\"<\n" + "\032HierarchyControllerVersion\022\013\n" + "\003hnc\030\001 \001(\t\022\021\n" + "\textension\030\002 \001(\t\"\314\001\n" + "\030HierarchyControllerState\022T\n" + "\007version\030\001 \001(\0132C.g" + "oogle.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion\022Z\n" + "\005state\030\002 \001(\0132K.google.cloud.gkehub.configmanagement." + "v1.HierarchyControllerDeploymentState\"\273\001\n\r" + "OperatorState\022\017\n" + "\007version\030\001 \001(\t\022R\n" + "\020deployment_state\030\002" + " \001(\01628.google.cloud.gkehub.configmanagement.v1.DeploymentState\022E\n" + "\006errors\030\003" + " \003(\01325.google.cloud.gkehub.configmanagement.v1.InstallError\"%\n" + "\014InstallError\022\025\n\r" + "error_message\030\001 \001(\t\"\304\006\n" + "\017ConfigSyncState\022K\n" + "\007version\030\001 \001(\0132:.google.cloud.g" + "kehub.configmanagement.v1.ConfigSyncVersion\022\\\n" + "\020deployment_state\030\002 \001(\0132B.google.c" + "loud.gkehub.configmanagement.v1.ConfigSyncDeploymentState\022F\n\n" + "sync_state\030\003 \001(\01322." 
+ "google.cloud.gkehub.configmanagement.v1.SyncState\022H\n" + "\006errors\030\004 \003(\01328.google.cloud" + ".gkehub.configmanagement.v1.ConfigSyncError\022W\n" + "\014rootsync_crd\030\005 \001(\0162A.google.cloud" + ".gkehub.configmanagement.v1.ConfigSyncState.CRDState\022W\n" + "\014reposync_crd\030\006 \001(\0162A.goo" + "gle.cloud.gkehub.configmanagement.v1.ConfigSyncState.CRDState\022M\n" + "\005state\030\007 \001(\0162>.g" + "oogle.cloud.gkehub.configmanagement.v1.ConfigSyncState.State\"h\n" + "\010CRDState\022\031\n" + "\025CRD_STATE_UNSPECIFIED\020\000\022\021\n\r" + "NOT_INSTALLED\020\001\022\r" + "\n" + "\tINSTALLED\020\002\022\017\n" + "\013TERMINATING\020\003\022\016\n\n" + "INSTALLING\020\004\"\210\001\n" + "\005State\022\025\n" + "\021STATE_UNSPECIFIED\020\000\022\035\n" + "\031CONFIG_SYNC_NOT_INSTALLED\020\001\022\031\n" + "\025CONFIG_SYNC_INSTALLED\020\002\022\025\n" + "\021CONFIG_SYNC_ERROR\020\003\022\027\n" + "\023CONFIG_SYNC_PENDING\020\004\"(\n" + "\017ConfigSyncError\022\025\n\r" + "error_message\030\001 \001(\t\"\250\001\n" + "\021ConfigSyncVersion\022\020\n" + "\010importer\030\001 \001(\t\022\016\n" + "\006syncer\030\002 \001(\t\022\020\n" + "\010git_sync\030\003 \001(\t\022\017\n" + "\007monitor\030\004 \001(\t\022\032\n" + "\022reconciler_manager\030\005 \001(\t\022\027\n" + "\017root_reconciler\030\006 \001(\t\022\031\n" + "\021admission_webhook\030\007 \001(\t\"\306\004\n" + "\031ConfigSyncDeploymentState\022J\n" + "\010importer\030\001" + " \001(\01628.google.cloud.gkehub.configmanagement.v1.DeploymentState\022H\n" + "\006syncer\030\002 \001(\016" + "28.google.cloud.gkehub.configmanagement.v1.DeploymentState\022J\n" + "\010git_sync\030\003 \001(\01628.g" + "oogle.cloud.gkehub.configmanagement.v1.DeploymentState\022I\n" + "\007monitor\030\004 \001(\01628.google" + ".cloud.gkehub.configmanagement.v1.DeploymentState\022T\n" + "\022reconciler_manager\030\005 \001(\01628." 
+ "google.cloud.gkehub.configmanagement.v1.DeploymentState\022Q\n" + "\017root_reconciler\030\006 \001(\016" + "28.google.cloud.gkehub.configmanagement.v1.DeploymentState\022S\n" + "\021admission_webhook\030\007" + " \001(\01628.google.cloud.gkehub.configmanagement.v1.DeploymentState\"\273\003\n" + "\tSyncState\022\024\n" + "\014source_token\030\001 \001(\t\022\024\n" + "\014import_token\030\002 \001(\t\022\022\n\n" + "sync_token\030\003 \001(\t\022\025\n" + "\tlast_sync\030\004 \001(\tB\002\030\001\0222\n" + "\016last_sync_time\030\007 \001(\0132\032.google.protobuf.Timestamp\022I\n" + "\004code\030\005 \001(\0162;.google." + "cloud.gkehub.configmanagement.v1.SyncState.SyncCode\022B\n" + "\006errors\030\006 \003(\01322.google.clo" + "ud.gkehub.configmanagement.v1.SyncError\"\223\001\n" + "\010SyncCode\022\031\n" + "\025SYNC_CODE_UNSPECIFIED\020\000\022\n\n" + "\006SYNCED\020\001\022\013\n" + "\007PENDING\020\002\022\t\n" + "\005ERROR\020\003\022\022\n" + "\016NOT_CONFIGURED\020\004\022\021\n\r" + "NOT_INSTALLED\020\005\022\020\n" + "\014UNAUTHORIZED\020\006\022\017\n" + "\013UNREACHABLE\020\007\"\201\001\n" + "\tSyncError\022\014\n" + "\004code\030\001 \001(\t\022\025\n\r" + "error_message\030\002 \001(\t\022O\n" + "\017error_resources\030\003 \003(\01326.google.cloud" + ".gkehub.configmanagement.v1.ErrorResource\"\250\001\n\r" + "ErrorResource\022\023\n" + "\013source_path\030\001 \001(\t\022\025\n\r" + "resource_name\030\002 \001(\t\022\032\n" + "\022resource_namespace\030\003 \001(\t\022O\n" + "\014resource_gvk\030\004 \001(\01329.goog" + "le.cloud.gkehub.configmanagement.v1.GroupVersionKind\"@\n" + "\020GroupVersionKind\022\r\n" + "\005group\030\001 \001(\t\022\017\n" + "\007version\030\002 \001(\t\022\014\n" + "\004kind\030\003 \001(\t\"\310\001\n" + "\025PolicyControllerState\022Q\n" + "\007version\030\001 \001(" + "\0132@.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion\022\\\n" + "\020deployment_state\030\002 
\001(\0132B.google.cloud.gkehub.conf" + "igmanagement.v1.GatekeeperDeploymentState\"*\n" + "\027PolicyControllerVersion\022\017\n" + "\007version\030\001 \001(\t\"\326\001\n" + "\031GatekeeperDeploymentState\022e\n" + "#gatekeeper_controller_manager_state\030\001 \001(\016" + "28.google.cloud.gkehub.configmanagement.v1.DeploymentState\022R\n" + "\020gatekeeper_audit\030\002" + " \001(\01628.google.cloud.gkehub.configmanagement.v1.DeploymentState*m\n" + "\017DeploymentState\022 \n" + "\034DEPLOYMENT_STATE_UNSPECIFIED\020\000\022\021\n\r" + "NOT_INSTALLED\020\001\022\r\n" + "\tINSTALLED\020\002\022\t\n" + "\005ERROR\020\003\022\013\n" + "\007PENDING\020\004B\241\002\n" + "+com.google.cloud.gkehub.configmanagement.v1B\025ConfigManagementP" + "rotoP\001ZWcloud.google.com/go/gkehub/configmanagement/apiv1/configmanagementpb;con" + "figmanagementpb\252\002\'Google.Cloud.GkeHub.Co" + "nfigManagement.V1\312\002\'Google\\Cloud\\GkeHub\\" + "ConfigManagement\\V1\352\002+Google::Cloud::GkeHub::ConfigManagement::V1b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.protobuf.TimestampProto.getDescriptor(), }); internal_static_google_cloud_gkehub_configmanagement_v1_MembershipState_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_gkehub_configmanagement_v1_MembershipState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_MembershipState_descriptor, new java.lang.String[] { "ClusterName", "MembershipSpec", "OperatorState", "ConfigSyncState", "PolicyControllerState", "HierarchyControllerState", }); internal_static_google_cloud_gkehub_configmanagement_v1_MembershipSpec_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_gkehub_configmanagement_v1_MembershipSpec_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_MembershipSpec_descriptor, new java.lang.String[] { "ConfigSync", "PolicyController", "HierarchyController", "Version", "Cluster", "Management", }); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSync_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSync_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSync_descriptor, new java.lang.String[] { "Git", "SourceFormat", "Enabled", "PreventDrift", "Oci", "MetricsGcpServiceAccountEmail", }); internal_static_google_cloud_gkehub_configmanagement_v1_GitConfig_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_gkehub_configmanagement_v1_GitConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_GitConfig_descriptor, new java.lang.String[] { "SyncRepo", "SyncBranch", "PolicyDir", "SyncWaitSecs", "SyncRev", "SecretType", "HttpsProxy", "GcpServiceAccountEmail", }); internal_static_google_cloud_gkehub_configmanagement_v1_OciConfig_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_gkehub_configmanagement_v1_OciConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_OciConfig_descriptor, new java.lang.String[] { "SyncRepo", "PolicyDir", "SyncWaitSecs", "SecretType", "GcpServiceAccountEmail", }); internal_static_google_cloud_gkehub_configmanagement_v1_PolicyController_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_gkehub_configmanagement_v1_PolicyController_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_gkehub_configmanagement_v1_PolicyController_descriptor, new java.lang.String[] { "Enabled", "TemplateLibraryInstalled", "AuditIntervalSeconds", "ExemptableNamespaces", "ReferentialRulesEnabled", "LogDeniesEnabled", }); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerConfig_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerConfig_descriptor, new java.lang.String[] { "Enabled", "EnablePodTreeLabels", "EnableHierarchicalResourceQuota", }); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerDeploymentState_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerDeploymentState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerDeploymentState_descriptor, new java.lang.String[] { "Hnc", "Extension", }); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerVersion_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerVersion_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerVersion_descriptor, new java.lang.String[] { "Hnc", "Extension", }); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_descriptor, new java.lang.String[] { "Version", "State", }); internal_static_google_cloud_gkehub_configmanagement_v1_OperatorState_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_gkehub_configmanagement_v1_OperatorState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_OperatorState_descriptor, new java.lang.String[] { "Version", "DeploymentState", "Errors", }); internal_static_google_cloud_gkehub_configmanagement_v1_InstallError_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_gkehub_configmanagement_v1_InstallError_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_InstallError_descriptor, new java.lang.String[] { "ErrorMessage", }); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncState_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncState_descriptor, new java.lang.String[] { "Version", "DeploymentState", "SyncState", "Errors", "RootsyncCrd", "ReposyncCrd", "State", }); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncError_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncError_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncError_descriptor, new java.lang.String[] { "ErrorMessage", }); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncVersion_descriptor = 
getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncVersion_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncVersion_descriptor, new java.lang.String[] { "Importer", "Syncer", "GitSync", "Monitor", "ReconcilerManager", "RootReconciler", "AdmissionWebhook", }); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncDeploymentState_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncDeploymentState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_ConfigSyncDeploymentState_descriptor, new java.lang.String[] { "Importer", "Syncer", "GitSync", "Monitor", "ReconcilerManager", "RootReconciler", "AdmissionWebhook", }); internal_static_google_cloud_gkehub_configmanagement_v1_SyncState_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_gkehub_configmanagement_v1_SyncState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_SyncState_descriptor, new java.lang.String[] { "SourceToken", "ImportToken", "SyncToken", "LastSync", "LastSyncTime", "Code", "Errors", }); internal_static_google_cloud_gkehub_configmanagement_v1_SyncError_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_gkehub_configmanagement_v1_SyncError_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_SyncError_descriptor, new java.lang.String[] { "Code", "ErrorMessage", "ErrorResources", }); internal_static_google_cloud_gkehub_configmanagement_v1_ErrorResource_descriptor = getDescriptor().getMessageTypes().get(18); 
internal_static_google_cloud_gkehub_configmanagement_v1_ErrorResource_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_ErrorResource_descriptor, new java.lang.String[] { "SourcePath", "ResourceName", "ResourceNamespace", "ResourceGvk", }); internal_static_google_cloud_gkehub_configmanagement_v1_GroupVersionKind_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_google_cloud_gkehub_configmanagement_v1_GroupVersionKind_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_GroupVersionKind_descriptor, new java.lang.String[] { "Group", "Version", "Kind", }); internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_descriptor, new java.lang.String[] { "Version", "DeploymentState", }); internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerVersion_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerVersion_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerVersion_descriptor, new java.lang.String[] { "Version", }); internal_static_google_cloud_gkehub_configmanagement_v1_GatekeeperDeploymentState_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_google_cloud_gkehub_configmanagement_v1_GatekeeperDeploymentState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_gkehub_configmanagement_v1_GatekeeperDeploymentState_descriptor, new java.lang.String[] { "GatekeeperControllerManagerState", "GatekeeperAudit", }); com.google.protobuf.TimestampProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
openjdk/jdk8
35,438
jaxp/src/com/sun/org/apache/xerces/internal/impl/XMLNamespaceBinder.java
/* * reserved comment block * DO NOT REMOVE OR ALTER! */ /* * The Apache Software License, Version 1.1 * * * Copyright (c) 2000-2002 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Xerces" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation and was * originally based on software copyright (c) 1999, International * Business Machines, Inc., http://www.apache.org. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. */ package com.sun.org.apache.xerces.internal.impl; import com.sun.org.apache.xerces.internal.impl.msg.XMLMessageFormatter; import com.sun.org.apache.xerces.internal.util.SymbolTable; import com.sun.org.apache.xerces.internal.util.XMLSymbols; import com.sun.org.apache.xerces.internal.xni.Augmentations; import com.sun.org.apache.xerces.internal.xni.NamespaceContext; import com.sun.org.apache.xerces.internal.xni.QName; import com.sun.org.apache.xerces.internal.xni.XMLAttributes; import com.sun.org.apache.xerces.internal.xni.XMLDocumentHandler; import com.sun.org.apache.xerces.internal.xni.XMLLocator; import com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier; import com.sun.org.apache.xerces.internal.xni.XMLString; import com.sun.org.apache.xerces.internal.xni.XNIException; import com.sun.org.apache.xerces.internal.xni.parser.XMLComponent; import com.sun.org.apache.xerces.internal.xni.parser.XMLComponentManager; import com.sun.org.apache.xerces.internal.xni.parser.XMLConfigurationException; import 
com.sun.org.apache.xerces.internal.xni.parser.XMLDocumentFilter; import com.sun.org.apache.xerces.internal.xni.parser.XMLDocumentSource; /** * This class performs namespace binding on the startElement and endElement * method calls and passes all other methods through to the registered * document handler. This class can be configured to only pass the * start and end prefix mappings (start/endPrefixMapping). * <p> * This component requires the following features and properties from the * component manager that uses it: * <ul> * <li>http://xml.org/sax/features/namespaces</li> * <li>http://apache.org/xml/properties/internal/symbol-table</li> * <li>http://apache.org/xml/properties/internal/error-reporter</li> * </ul> * * @xerces.internal * * @author Andy Clark, IBM * * @version $Id: XMLNamespaceBinder.java,v 1.4 2010-11-01 04:39:41 joehw Exp $ */ public class XMLNamespaceBinder implements XMLComponent, XMLDocumentFilter { // // Constants // // feature identifiers /** Feature identifier: namespaces. */ protected static final String NAMESPACES = Constants.SAX_FEATURE_PREFIX + Constants.NAMESPACES_FEATURE; // property identifiers /** Property identifier: symbol table. */ protected static final String SYMBOL_TABLE = Constants.XERCES_PROPERTY_PREFIX + Constants.SYMBOL_TABLE_PROPERTY; /** Property identifier: error reporter. */ protected static final String ERROR_REPORTER = Constants.XERCES_PROPERTY_PREFIX + Constants.ERROR_REPORTER_PROPERTY; // recognized features and properties /** Recognized features. */ private static final String[] RECOGNIZED_FEATURES = { NAMESPACES, }; /** Feature defaults. */ private static final Boolean[] FEATURE_DEFAULTS = { null, }; /** Recognized properties. */ private static final String[] RECOGNIZED_PROPERTIES = { SYMBOL_TABLE, ERROR_REPORTER, }; /** Property defaults. */ private static final Object[] PROPERTY_DEFAULTS = { null, null, }; // // Data // // features /** Namespaces. */ protected boolean fNamespaces; // properties /** Symbol table. 
*/ protected SymbolTable fSymbolTable; /** Error reporter. */ protected XMLErrorReporter fErrorReporter; // handlers /** Document handler. */ protected XMLDocumentHandler fDocumentHandler; protected XMLDocumentSource fDocumentSource; // settings /** Only pass start and end prefix mapping events. */ protected boolean fOnlyPassPrefixMappingEvents; // shared context /** Namespace context. */ private NamespaceContext fNamespaceContext; // temp vars /** Attribute QName. */ private QName fAttributeQName = new QName(); // // Constructors // /** Default constructor. */ public XMLNamespaceBinder() { } // <init>() // // Public methods // // settings /** * Sets whether the namespace binder only passes the prefix mapping * events to the registered document handler or passes all document * events. * * @param onlyPassPrefixMappingEvents True to pass only the prefix * mapping events; false to pass * all events. */ public void setOnlyPassPrefixMappingEvents(boolean onlyPassPrefixMappingEvents) { fOnlyPassPrefixMappingEvents = onlyPassPrefixMappingEvents; } // setOnlyPassPrefixMappingEvents(boolean) /** * Returns true if the namespace binder only passes the prefix mapping * events to the registered document handler; false if the namespace * binder passes all document events. */ public boolean getOnlyPassPrefixMappingEvents() { return fOnlyPassPrefixMappingEvents; } // getOnlyPassPrefixMappingEvents():boolean // // XMLComponent methods // /** * Resets the component. The component can query the component manager * about any features and properties that affect the operation of the * component. * * @param componentManager The component manager. * * @throws SAXException Thrown by component on initialization error. * For example, if a feature or property is * required for the operation of the component, the * component manager may throw a * SAXNotRecognizedException or a * SAXNotSupportedException. 
*/ public void reset(XMLComponentManager componentManager) throws XNIException { // features fNamespaces = componentManager.getFeature(NAMESPACES, true); // Xerces properties fSymbolTable = (SymbolTable)componentManager.getProperty(SYMBOL_TABLE); fErrorReporter = (XMLErrorReporter)componentManager.getProperty(ERROR_REPORTER); } // reset(XMLComponentManager) /** * Returns a list of feature identifiers that are recognized by * this component. This method may return null if no features * are recognized by this component. */ public String[] getRecognizedFeatures() { return (String[])(RECOGNIZED_FEATURES.clone()); } // getRecognizedFeatures():String[] /** * Sets the state of a feature. This method is called by the component * manager any time after reset when a feature changes state. * <p> * <strong>Note:</strong> Components should silently ignore features * that do not affect the operation of the component. * * @param featureId The feature identifier. * @param state The state of the feature. * * @throws SAXNotRecognizedException The component should not throw * this exception. * @throws SAXNotSupportedException The component should not throw * this exception. */ public void setFeature(String featureId, boolean state) throws XMLConfigurationException { } // setFeature(String,boolean) /** * Returns a list of property identifiers that are recognized by * this component. This method may return null if no properties * are recognized by this component. */ public String[] getRecognizedProperties() { return (String[])(RECOGNIZED_PROPERTIES.clone()); } // getRecognizedProperties():String[] /** * Sets the value of a property during parsing. 
* * @param propertyId * @param value */ public void setProperty(String propertyId, Object value) throws XMLConfigurationException { // Xerces properties if (propertyId.startsWith(Constants.XERCES_PROPERTY_PREFIX)) { final int suffixLength = propertyId.length() - Constants.XERCES_PROPERTY_PREFIX.length(); if (suffixLength == Constants.SYMBOL_TABLE_PROPERTY.length() && propertyId.endsWith(Constants.SYMBOL_TABLE_PROPERTY)) { fSymbolTable = (SymbolTable)value; } else if (suffixLength == Constants.ERROR_REPORTER_PROPERTY.length() && propertyId.endsWith(Constants.ERROR_REPORTER_PROPERTY)) { fErrorReporter = (XMLErrorReporter)value; } return; } } // setProperty(String,Object) /** * Returns the default state for a feature, or null if this * component does not want to report a default value for this * feature. * * @param featureId The feature identifier. * * @since Xerces 2.2.0 */ public Boolean getFeatureDefault(String featureId) { for (int i = 0; i < RECOGNIZED_FEATURES.length; i++) { if (RECOGNIZED_FEATURES[i].equals(featureId)) { return FEATURE_DEFAULTS[i]; } } return null; } // getFeatureDefault(String):Boolean /** * Returns the default state for a property, or null if this * component does not want to report a default value for this * property. * * @param propertyId The property identifier. * * @since Xerces 2.2.0 */ public Object getPropertyDefault(String propertyId) { for (int i = 0; i < RECOGNIZED_PROPERTIES.length; i++) { if (RECOGNIZED_PROPERTIES[i].equals(propertyId)) { return PROPERTY_DEFAULTS[i]; } } return null; } // getPropertyDefault(String):Object // // XMLDocumentSource methods // /** Sets the document handler to receive information about the document. 
*/ public void setDocumentHandler(XMLDocumentHandler documentHandler) { fDocumentHandler = documentHandler; } // setDocumentHandler(XMLDocumentHandler) /** Returns the document handler */ public XMLDocumentHandler getDocumentHandler() { return fDocumentHandler; } // setDocumentHandler(XMLDocumentHandler) // // XMLDocumentHandler methods // /** Sets the document source */ public void setDocumentSource(XMLDocumentSource source){ fDocumentSource = source; } // setDocumentSource /** Returns the document source */ public XMLDocumentSource getDocumentSource (){ return fDocumentSource; } // getDocumentSource /** * This method notifies the start of a general entity. * <p> * <strong>Note:</strong> This method is not called for entity references * appearing as part of attribute values. * * @param name The name of the general entity. * @param identifier The resource identifier. * @param encoding The auto-detected IANA encoding name of the entity * stream. This value will be null in those situations * where the entity encoding is not auto-detected (e.g. * internal entities or a document entity that is * parsed from a java.io.Reader). * @param augs Additional information that may include infoset augmentations * * @exception XNIException Thrown by handler to signal an error. */ public void startGeneralEntity(String name, XMLResourceIdentifier identifier, String encoding, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.startGeneralEntity(name, identifier, encoding, augs); } } // startEntity(String,String,String,String,String) /** * Notifies of the presence of a TextDecl line in an entity. If present, * this method will be called immediately following the startEntity call. * <p> * <strong>Note:</strong> This method will never be called for the * document entity; it is only called for external general entities * referenced in document content. 
* <p> * <strong>Note:</strong> This method is not called for entity references * appearing as part of attribute values. * * @param version The XML version, or null if not specified. * @param encoding The IANA encoding name of the entity. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void textDecl(String version, String encoding, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.textDecl(version, encoding, augs); } } // textDecl(String,String) /** * The start of the document. * * @param locator The system identifier of the entity if the entity * is external, null otherwise. * @param encoding The auto-detected IANA encoding name of the entity * stream. This value will be null in those situations * where the entity encoding is not auto-detected (e.g. * internal entities or a document entity that is * parsed from a java.io.Reader). * @param namespaceContext * The namespace context in effect at the * start of this document. * This object represents the current context. * Implementors of this class are responsible * for copying the namespace bindings from the * the current context (and its parent contexts) * if that information is important. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void startDocument(XMLLocator locator, String encoding, NamespaceContext namespaceContext, Augmentations augs) throws XNIException { fNamespaceContext = namespaceContext; if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.startDocument(locator, encoding, namespaceContext, augs); } } // startDocument(XMLLocator,String) /** * Notifies of the presence of an XMLDecl line in the document. If * present, this method will be called immediately following the * startDocument call. 
* * @param version The XML version. * @param encoding The IANA encoding name of the document, or null if * not specified. * @param standalone The standalone value, or null if not specified. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void xmlDecl(String version, String encoding, String standalone, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.xmlDecl(version, encoding, standalone, augs); } } // xmlDecl(String,String,String) /** * Notifies of the presence of the DOCTYPE line in the document. * * @param rootElement The name of the root element. * @param publicId The public identifier if an external DTD or null * if the external DTD is specified using SYSTEM. * @param systemId The system identifier if an external DTD, null * otherwise. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void doctypeDecl(String rootElement, String publicId, String systemId, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.doctypeDecl(rootElement, publicId, systemId, augs); } } // doctypeDecl(String,String,String) /** * A comment. * * @param text The text in the comment. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by application to signal an error. */ public void comment(XMLString text, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.comment(text, augs); } } // comment(XMLString) /** * A processing instruction. Processing instructions consist of a * target name and, optionally, text data. The data is only meaningful * to the application. 
* <p> * Typically, a processing instruction's data will contain a series * of pseudo-attributes. These pseudo-attributes follow the form of * element attributes but are <strong>not</strong> parsed or presented * to the application as anything other than text. The application is * responsible for parsing the data. * * @param target The target. * @param data The data or null if none specified. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void processingInstruction(String target, XMLString data, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.processingInstruction(target, data, augs); } } // processingInstruction(String,XMLString) /** * Binds the namespaces. This method will handle calling the * document handler to start the prefix mappings. * <p> * <strong>Note:</strong> This method makes use of the * fAttributeQName variable. Any contents of the variable will * be destroyed. Caller should copy the values out of this * temporary variable before calling this method. * * @param element The name of the element. * @param attributes The element attributes. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void startElement(QName element, XMLAttributes attributes, Augmentations augs) throws XNIException { if (fNamespaces) { handleStartElement(element, attributes, augs, false); } else if (fDocumentHandler != null) { fDocumentHandler.startElement(element, attributes, augs); } } // startElement(QName,XMLAttributes) /** * An empty element. * * @param element The name of the element. * @param attributes The element attributes. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. 
*/ public void emptyElement(QName element, XMLAttributes attributes, Augmentations augs) throws XNIException { if (fNamespaces) { handleStartElement(element, attributes, augs, true); handleEndElement(element, augs, true); } else if (fDocumentHandler != null) { fDocumentHandler.emptyElement(element, attributes, augs); } } // emptyElement(QName,XMLAttributes) /** * Character content. * * @param text The content. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void characters(XMLString text, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.characters(text, augs); } } // characters(XMLString) /** * Ignorable whitespace. For this method to be called, the document * source must have some way of determining that the text containing * only whitespace characters should be considered ignorable. For * example, the validator can determine if a length of whitespace * characters in the document are ignorable based on the element * content model. * * @param text The ignorable whitespace. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void ignorableWhitespace(XMLString text, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.ignorableWhitespace(text, augs); } } // ignorableWhitespace(XMLString) /** * The end of an element. * * @param element The name of the element. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. 
*/ public void endElement(QName element, Augmentations augs) throws XNIException { if (fNamespaces) { handleEndElement(element, augs, false); } else if (fDocumentHandler != null) { fDocumentHandler.endElement(element, augs); } } // endElement(QName) /** * The start of a CDATA section. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void startCDATA(Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.startCDATA(augs); } } // startCDATA() /** * The end of a CDATA section. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void endCDATA(Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.endCDATA(augs); } } // endCDATA() /** * The end of the document. * @param augs Additional information that may include infoset augmentations * * @throws XNIException Thrown by handler to signal an error. */ public void endDocument(Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.endDocument(augs); } } // endDocument() /** * This method notifies the end of a general entity. * <p> * <strong>Note:</strong> This method is not called for entity references * appearing as part of attribute values. * * @param name The name of the entity. * @param augs Additional information that may include infoset augmentations * * @exception XNIException * Thrown by handler to signal an error. */ public void endGeneralEntity(String name, Augmentations augs) throws XNIException { if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { fDocumentHandler.endGeneralEntity(name, augs); } } // endEntity(String) // // Protected methods // /** Handles start element. 
*/ protected void handleStartElement(QName element, XMLAttributes attributes, Augmentations augs, boolean isEmpty) throws XNIException { // add new namespace context fNamespaceContext.pushContext(); if (element.prefix == XMLSymbols.PREFIX_XMLNS) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "ElementXMLNSPrefix", new Object[]{element.rawname}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } // search for new namespace bindings int length = attributes.getLength(); for (int i = 0; i < length; i++) { String localpart = attributes.getLocalName(i); String prefix = attributes.getPrefix(i); // when it's of form xmlns="..." or xmlns:prefix="...", // it's a namespace declaration. but prefix:xmlns="..." isn't. if (prefix == XMLSymbols.PREFIX_XMLNS || prefix == XMLSymbols.EMPTY_STRING && localpart == XMLSymbols.PREFIX_XMLNS) { // get the internalized value of this attribute String uri = fSymbolTable.addSymbol(attributes.getValue(i)); // 1. "xmlns" can't be bound to any namespace if (prefix == XMLSymbols.PREFIX_XMLNS && localpart == XMLSymbols.PREFIX_XMLNS) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXMLNS", new Object[]{attributes.getQName(i)}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } // 2. the namespace for "xmlns" can't be bound to any prefix if (uri == NamespaceContext.XMLNS_URI) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXMLNS", new Object[]{attributes.getQName(i)}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } // 3. "xml" can't be bound to any other namespace than it's own if (localpart == XMLSymbols.PREFIX_XML) { if (uri != NamespaceContext.XML_URI) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXML", new Object[]{attributes.getQName(i)}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } // 4. 
the namespace for "xml" can't be bound to any other prefix else { if (uri ==NamespaceContext.XML_URI) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXML", new Object[]{attributes.getQName(i)}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } prefix = localpart != XMLSymbols.PREFIX_XMLNS ? localpart : XMLSymbols.EMPTY_STRING; // http://www.w3.org/TR/1999/REC-xml-names-19990114/#dt-prefix // We should only report an error if there is a prefix, // that is, the local part is not "xmlns". -SG // Since this is an error condition in XML 1.0, // and should be relatively uncommon in XML 1.1, // making this test into a method call to reuse code // should be acceptable. - NG if(prefixBoundToNullURI(uri, localpart)) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "EmptyPrefixedAttName", new Object[]{attributes.getQName(i)}, XMLErrorReporter.SEVERITY_FATAL_ERROR); continue; } // declare prefix in context fNamespaceContext.declarePrefix(prefix, uri.length() != 0 ? uri : null); } } // bind the element String prefix = element.prefix != null ? element.prefix : XMLSymbols.EMPTY_STRING; element.uri = fNamespaceContext.getURI(prefix); if (element.prefix == null && element.uri != null) { element.prefix = XMLSymbols.EMPTY_STRING; } if (element.prefix != null && element.uri == null) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "ElementPrefixUnbound", new Object[]{element.prefix, element.rawname}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } // bind the attributes for (int i = 0; i < length; i++) { attributes.getName(i, fAttributeQName); String aprefix = fAttributeQName.prefix != null ? 
fAttributeQName.prefix : XMLSymbols.EMPTY_STRING; String arawname = fAttributeQName.rawname; if (arawname == XMLSymbols.PREFIX_XMLNS) { fAttributeQName.uri = fNamespaceContext.getURI(XMLSymbols.PREFIX_XMLNS); attributes.setName(i, fAttributeQName); } else if (aprefix != XMLSymbols.EMPTY_STRING) { fAttributeQName.uri = fNamespaceContext.getURI(aprefix); if (fAttributeQName.uri == null) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "AttributePrefixUnbound", new Object[]{element.rawname,arawname,aprefix}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } attributes.setName(i, fAttributeQName); } } // verify that duplicate attributes don't exist // Example: <foo xmlns:a='NS' xmlns:b='NS' a:attr='v1' b:attr='v2'/> int attrCount = attributes.getLength(); for (int i = 0; i < attrCount - 1; i++) { String auri = attributes.getURI(i); if (auri == null || auri == NamespaceContext.XMLNS_URI) { continue; } String alocalpart = attributes.getLocalName(i); for (int j = i + 1; j < attrCount; j++) { String blocalpart = attributes.getLocalName(j); String buri = attributes.getURI(j); if (alocalpart == blocalpart && auri == buri) { fErrorReporter.reportError(XMLMessageFormatter.XMLNS_DOMAIN, "AttributeNSNotUnique", new Object[]{element.rawname,alocalpart, auri}, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } } // call handler if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { if (isEmpty) { fDocumentHandler.emptyElement(element, attributes, augs); } else { fDocumentHandler.startElement(element, attributes, augs); } } } // handleStartElement(QName,XMLAttributes,boolean) /** Handles end element. */ protected void handleEndElement(QName element, Augmentations augs, boolean isEmpty) throws XNIException { // bind element String eprefix = element.prefix != null ? 
element.prefix : XMLSymbols.EMPTY_STRING; element.uri = fNamespaceContext.getURI(eprefix); if (element.uri != null) { element.prefix = eprefix; } // call handlers if (fDocumentHandler != null && !fOnlyPassPrefixMappingEvents) { if (!isEmpty) { fDocumentHandler.endElement(element, augs); } } // pop context fNamespaceContext.popContext(); } // handleEndElement(QName,boolean) // returns true iff the given prefix is bound to "" *and* // this is disallowed by the version of XML namespaces in use. protected boolean prefixBoundToNullURI(String uri, String localpart) { return (uri == XMLSymbols.EMPTY_STRING && localpart != XMLSymbols.PREFIX_XMLNS); } // prefixBoundToNullURI(String, String): boolean } // class XMLNamespaceBinder
googleapis/google-cloud-java
35,132
java-oracledatabase/proto-google-cloud-oracledatabase-v1/src/main/java/com/google/cloud/oracledatabase/v1/ListDbNodesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/oracledatabase/v1/oracledatabase.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.oracledatabase.v1; /** * * * <pre> * The response for `DbNode.List`. * </pre> * * Protobuf type {@code google.cloud.oracledatabase.v1.ListDbNodesResponse} */ public final class ListDbNodesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.oracledatabase.v1.ListDbNodesResponse) ListDbNodesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListDbNodesResponse.newBuilder() to construct. 
private ListDbNodesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDbNodesResponse() { dbNodes_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDbNodesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListDbNodesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListDbNodesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.oracledatabase.v1.ListDbNodesResponse.class, com.google.cloud.oracledatabase.v1.ListDbNodesResponse.Builder.class); } public static final int DB_NODES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.oracledatabase.v1.DbNode> dbNodes_; /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.oracledatabase.v1.DbNode> getDbNodesList() { return dbNodes_; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.oracledatabase.v1.DbNodeOrBuilder> getDbNodesOrBuilderList() { return dbNodes_; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ @java.lang.Override public int getDbNodesCount() { return dbNodes_.size(); } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ @java.lang.Override public com.google.cloud.oracledatabase.v1.DbNode getDbNodes(int index) { return dbNodes_.get(index); } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ @java.lang.Override public com.google.cloud.oracledatabase.v1.DbNodeOrBuilder getDbNodesOrBuilder(int index) { return dbNodes_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the node should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the node should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < dbNodes_.size(); i++) { output.writeMessage(1, dbNodes_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < dbNodes_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dbNodes_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.oracledatabase.v1.ListDbNodesResponse)) { return super.equals(obj); } com.google.cloud.oracledatabase.v1.ListDbNodesResponse other = (com.google.cloud.oracledatabase.v1.ListDbNodesResponse) obj; if (!getDbNodesList().equals(other.getDbNodesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getDbNodesCount() > 0) { hash = (37 * hash) + DB_NODES_FIELD_NUMBER; hash = (53 * hash) + getDbNodesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.oracledatabase.v1.ListDbNodesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response for `DbNode.List`. * </pre> * * Protobuf type {@code google.cloud.oracledatabase.v1.ListDbNodesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.oracledatabase.v1.ListDbNodesResponse) com.google.cloud.oracledatabase.v1.ListDbNodesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListDbNodesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListDbNodesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.oracledatabase.v1.ListDbNodesResponse.class, com.google.cloud.oracledatabase.v1.ListDbNodesResponse.Builder.class); } // Construct using com.google.cloud.oracledatabase.v1.ListDbNodesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (dbNodesBuilder_ == null) { dbNodes_ = java.util.Collections.emptyList(); } else { dbNodes_ = null; dbNodesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.oracledatabase.v1.V1mainProto 
.internal_static_google_cloud_oracledatabase_v1_ListDbNodesResponse_descriptor; } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListDbNodesResponse getDefaultInstanceForType() { return com.google.cloud.oracledatabase.v1.ListDbNodesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListDbNodesResponse build() { com.google.cloud.oracledatabase.v1.ListDbNodesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListDbNodesResponse buildPartial() { com.google.cloud.oracledatabase.v1.ListDbNodesResponse result = new com.google.cloud.oracledatabase.v1.ListDbNodesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.oracledatabase.v1.ListDbNodesResponse result) { if (dbNodesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { dbNodes_ = java.util.Collections.unmodifiableList(dbNodes_); bitField0_ = (bitField0_ & ~0x00000001); } result.dbNodes_ = dbNodes_; } else { result.dbNodes_ = dbNodesBuilder_.build(); } } private void buildPartial0(com.google.cloud.oracledatabase.v1.ListDbNodesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override 
public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.oracledatabase.v1.ListDbNodesResponse) { return mergeFrom((com.google.cloud.oracledatabase.v1.ListDbNodesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.oracledatabase.v1.ListDbNodesResponse other) { if (other == com.google.cloud.oracledatabase.v1.ListDbNodesResponse.getDefaultInstance()) return this; if (dbNodesBuilder_ == null) { if (!other.dbNodes_.isEmpty()) { if (dbNodes_.isEmpty()) { dbNodes_ = other.dbNodes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureDbNodesIsMutable(); dbNodes_.addAll(other.dbNodes_); } onChanged(); } } else { if (!other.dbNodes_.isEmpty()) { if (dbNodesBuilder_.isEmpty()) { dbNodesBuilder_.dispose(); dbNodesBuilder_ = null; dbNodes_ = other.dbNodes_; bitField0_ = (bitField0_ & ~0x00000001); dbNodesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getDbNodesFieldBuilder() : null; } else { dbNodesBuilder_.addAllMessages(other.dbNodes_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.oracledatabase.v1.DbNode m = input.readMessage( com.google.cloud.oracledatabase.v1.DbNode.parser(), extensionRegistry); if (dbNodesBuilder_ == null) { ensureDbNodesIsMutable(); dbNodes_.add(m); } else { dbNodesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.oracledatabase.v1.DbNode> dbNodes_ = java.util.Collections.emptyList(); private void ensureDbNodesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { dbNodes_ = new java.util.ArrayList<com.google.cloud.oracledatabase.v1.DbNode>(dbNodes_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.oracledatabase.v1.DbNode, com.google.cloud.oracledatabase.v1.DbNode.Builder, com.google.cloud.oracledatabase.v1.DbNodeOrBuilder> 
dbNodesBuilder_; /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public java.util.List<com.google.cloud.oracledatabase.v1.DbNode> getDbNodesList() { if (dbNodesBuilder_ == null) { return java.util.Collections.unmodifiableList(dbNodes_); } else { return dbNodesBuilder_.getMessageList(); } } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public int getDbNodesCount() { if (dbNodesBuilder_ == null) { return dbNodes_.size(); } else { return dbNodesBuilder_.getCount(); } } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public com.google.cloud.oracledatabase.v1.DbNode getDbNodes(int index) { if (dbNodesBuilder_ == null) { return dbNodes_.get(index); } else { return dbNodesBuilder_.getMessage(index); } } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder setDbNodes(int index, com.google.cloud.oracledatabase.v1.DbNode value) { if (dbNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDbNodesIsMutable(); dbNodes_.set(index, value); onChanged(); } else { dbNodesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder setDbNodes( int index, com.google.cloud.oracledatabase.v1.DbNode.Builder builderForValue) { if (dbNodesBuilder_ == null) { ensureDbNodesIsMutable(); dbNodes_.set(index, builderForValue.build()); onChanged(); } else { dbNodesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder 
addDbNodes(com.google.cloud.oracledatabase.v1.DbNode value) { if (dbNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDbNodesIsMutable(); dbNodes_.add(value); onChanged(); } else { dbNodesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder addDbNodes(int index, com.google.cloud.oracledatabase.v1.DbNode value) { if (dbNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDbNodesIsMutable(); dbNodes_.add(index, value); onChanged(); } else { dbNodesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder addDbNodes(com.google.cloud.oracledatabase.v1.DbNode.Builder builderForValue) { if (dbNodesBuilder_ == null) { ensureDbNodesIsMutable(); dbNodes_.add(builderForValue.build()); onChanged(); } else { dbNodesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder addDbNodes( int index, com.google.cloud.oracledatabase.v1.DbNode.Builder builderForValue) { if (dbNodesBuilder_ == null) { ensureDbNodesIsMutable(); dbNodes_.add(index, builderForValue.build()); onChanged(); } else { dbNodesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder addAllDbNodes( java.lang.Iterable<? 
extends com.google.cloud.oracledatabase.v1.DbNode> values) { if (dbNodesBuilder_ == null) { ensureDbNodesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dbNodes_); onChanged(); } else { dbNodesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder clearDbNodes() { if (dbNodesBuilder_ == null) { dbNodes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { dbNodesBuilder_.clear(); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public Builder removeDbNodes(int index) { if (dbNodesBuilder_ == null) { ensureDbNodesIsMutable(); dbNodes_.remove(index); onChanged(); } else { dbNodesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public com.google.cloud.oracledatabase.v1.DbNode.Builder getDbNodesBuilder(int index) { return getDbNodesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public com.google.cloud.oracledatabase.v1.DbNodeOrBuilder getDbNodesOrBuilder(int index) { if (dbNodesBuilder_ == null) { return dbNodes_.get(index); } else { return dbNodesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public java.util.List<? 
extends com.google.cloud.oracledatabase.v1.DbNodeOrBuilder> getDbNodesOrBuilderList() { if (dbNodesBuilder_ != null) { return dbNodesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(dbNodes_); } } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public com.google.cloud.oracledatabase.v1.DbNode.Builder addDbNodesBuilder() { return getDbNodesFieldBuilder() .addBuilder(com.google.cloud.oracledatabase.v1.DbNode.getDefaultInstance()); } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public com.google.cloud.oracledatabase.v1.DbNode.Builder addDbNodesBuilder(int index) { return getDbNodesFieldBuilder() .addBuilder(index, com.google.cloud.oracledatabase.v1.DbNode.getDefaultInstance()); } /** * * * <pre> * The list of DB Nodes * </pre> * * <code>repeated .google.cloud.oracledatabase.v1.DbNode db_nodes = 1;</code> */ public java.util.List<com.google.cloud.oracledatabase.v1.DbNode.Builder> getDbNodesBuilderList() { return getDbNodesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.oracledatabase.v1.DbNode, com.google.cloud.oracledatabase.v1.DbNode.Builder, com.google.cloud.oracledatabase.v1.DbNodeOrBuilder> getDbNodesFieldBuilder() { if (dbNodesBuilder_ == null) { dbNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.oracledatabase.v1.DbNode, com.google.cloud.oracledatabase.v1.DbNode.Builder, com.google.cloud.oracledatabase.v1.DbNodeOrBuilder>( dbNodes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); dbNodes_ = null; } return dbNodesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the node should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the node should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the node should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the node should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the node should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.oracledatabase.v1.ListDbNodesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.oracledatabase.v1.ListDbNodesResponse) private static final com.google.cloud.oracledatabase.v1.ListDbNodesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.oracledatabase.v1.ListDbNodesResponse(); } public static com.google.cloud.oracledatabase.v1.ListDbNodesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDbNodesResponse> PARSER = new com.google.protobuf.AbstractParser<ListDbNodesResponse>() { @java.lang.Override public ListDbNodesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListDbNodesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDbNodesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListDbNodesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop
35,047
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.InterruptedIOException; import java.io.IOException; import java.net.URI; import java.nio.file.AccessDeniedException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.annotation.Nullable; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider; import software.amazon.awssdk.auth.credentials.InstanceProfileCredentialsProvider; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; 
import org.apache.hadoop.fs.s3a.auth.AbstractSessionCredentialsProvider; import org.apache.hadoop.fs.s3a.auth.AssumedRoleCredentialProvider; import org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory; import org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider; import org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException; import org.apache.hadoop.fs.s3a.auth.ProfileAWSCredentialsProvider; import org.apache.hadoop.fs.s3a.auth.delegation.CountInvocationsProvider; import org.apache.hadoop.fs.s3a.impl.InstantiationIOException; import org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.util.Sets; import static org.apache.hadoop.fs.s3a.Constants.ASSUMED_ROLE_CREDENTIALS_PROVIDER; import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER; import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER_MAPPING; import static org.apache.hadoop.fs.s3a.S3ATestUtils.authenticationContains; import static org.apache.hadoop.fs.s3a.S3ATestUtils.buildClassListString; import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.STANDARD_AWS_PROVIDERS; import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.buildAWSProviderList; import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.createAWSCredentialProviderList; import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.DOES_NOT_IMPLEMENT; import static org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils.getExternalData; import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.apache.hadoop.test.LambdaTestUtils.interceptFuture; import static org.apache.hadoop.util.StringUtils.STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG; /** * Unit tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic. */ public class TestS3AAWSCredentialsProvider extends AbstractS3ATestBase { /** * URI of the test file: this must be anonymously accessible. 
* As these are unit tests no actual connection to the store is made. */ private static final URI TESTFILE_URI = new Path( PublicDatasetTestUtils.DEFAULT_EXTERNAL_FILE).toUri(); private static final Logger LOG = LoggerFactory.getLogger(TestS3AAWSCredentialsProvider.class); public static final int TERMINATION_TIMEOUT = 3; @Test public void testProviderWrongClass() throws Exception { expectProviderInstantiationFailure(this.getClass(), DOES_NOT_IMPLEMENT + " software.amazon.awssdk.auth.credentials.AwsCredentialsProvider"); } @Test public void testProviderAbstractClass() throws Exception { expectProviderInstantiationFailure(AbstractProvider.class, InstantiationIOException.ABSTRACT_PROVIDER); } @Test public void testProviderNotAClass() throws Exception { expectProviderInstantiationFailure("NoSuchClass", "ClassNotFoundException"); } @Test public void testProviderConstructorError() throws Exception { expectProviderInstantiationFailure( ConstructorSignatureErrorProvider.class, InstantiationIOException.CONSTRUCTOR_EXCEPTION); } @Test public void testProviderFailureError() throws Exception { expectProviderInstantiationFailure( ConstructorFailureProvider.class, InstantiationIOException.INSTANTIATION_EXCEPTION); } @Test public void testInstantiationChain() throws Throwable { Configuration conf = new Configuration(false); conf.set(AWS_CREDENTIALS_PROVIDER, TemporaryAWSCredentialsProvider.NAME + ", \t" + SimpleAWSCredentialsProvider.NAME + " ,\n " + AnonymousAWSCredentialsProvider.NAME); Path testFile = getExternalData(conf); AWSCredentialProviderList list = createAWSCredentialProviderList( testFile.toUri(), conf); List<Class<?>> expectedClasses = Arrays.asList( TemporaryAWSCredentialsProvider.class, SimpleAWSCredentialsProvider.class, AnonymousAWSCredentialsProvider.class); assertCredentialProviders(expectedClasses, list); } @Test public void testProfileAWSCredentialsProvider() throws Throwable { Configuration conf = new Configuration(false); conf.set(AWS_CREDENTIALS_PROVIDER, 
ProfileAWSCredentialsProvider.NAME); File tempFile = File.createTempFile("testcred", ".conf", new File("target")); tempFile.deleteOnExit(); try (FileWriter fileWriter = new FileWriter(tempFile); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter)) { bufferedWriter.write("[default]\n" + "aws_access_key_id = defaultaccesskeyid\n" + "aws_secret_access_key = defaultsecretkeyid\n"); bufferedWriter.write("[nondefault]\n" + "aws_access_key_id = nondefaultaccesskeyid\n" + "aws_secret_access_key = nondefaultsecretkeyid\n"); } conf.set(ProfileAWSCredentialsProvider.PROFILE_FILE, tempFile.getAbsolutePath()); URI testUri = new URI("s3a://bucket1"); AWSCredentialProviderList list = createAWSCredentialProviderList(testUri, conf); assertCredentialProviders(Collections.singletonList(ProfileAWSCredentialsProvider.class), list); AwsCredentials credentials = list.resolveCredentials(); Assertions.assertThat(credentials.accessKeyId()).isEqualTo("defaultaccesskeyid"); Assertions.assertThat(credentials.secretAccessKey()).isEqualTo("defaultsecretkeyid"); conf.set(ProfileAWSCredentialsProvider.PROFILE_NAME, "nondefault"); list = createAWSCredentialProviderList(testUri, conf); credentials = list.resolveCredentials(); Assertions.assertThat(credentials.accessKeyId()).isEqualTo("nondefaultaccesskeyid"); Assertions.assertThat(credentials.secretAccessKey()).isEqualTo("nondefaultsecretkeyid"); } @Test public void testDefaultChain() throws Exception { URI uri1 = new URI("s3a://bucket1"), uri2 = new URI("s3a://bucket2"); Configuration conf = new Configuration(false); // use the default credential provider chain conf.unset(AWS_CREDENTIALS_PROVIDER); AWSCredentialProviderList list1 = createAWSCredentialProviderList( uri1, conf); AWSCredentialProviderList list2 = createAWSCredentialProviderList( uri2, conf); List<Class<?>> expectedClasses = STANDARD_AWS_PROVIDERS; assertCredentialProviders(expectedClasses, list1); assertCredentialProviders(expectedClasses, list2); } @Test public void 
testNonSdkExceptionConversion() throws Throwable { // Create a mock credential provider that throws a non-SDK exception AwsCredentialsProvider mockProvider = () -> { throw new RuntimeException("Test credential error"); }; // Create the provider list with our mock provider AWSCredentialProviderList providerList = new AWSCredentialProviderList(Collections.singletonList(mockProvider)); // Attempt to get credentials, which should trigger the exception intercept(NoAuthWithAWSException.class, "No AWS Credentials provided", () -> providerList.resolveCredentials()); } @Test public void testDefaultChainNoURI() throws Exception { Configuration conf = new Configuration(false); // use the default credential provider chain conf.unset(AWS_CREDENTIALS_PROVIDER); assertCredentialProviders(STANDARD_AWS_PROVIDERS, createAWSCredentialProviderList(null, conf)); } @Test public void testConfiguredChain() throws Exception { URI uri1 = new URI("s3a://bucket1"), uri2 = new URI("s3a://bucket2"); List<Class<?>> expectedClasses = Arrays.asList( IAMInstanceCredentialsProvider.class, AnonymousAWSCredentialsProvider.class, EnvironmentVariableCredentialsProvider.class ); Configuration conf = createProviderConfiguration(buildClassListString(expectedClasses)); AWSCredentialProviderList list1 = createAWSCredentialProviderList( uri1, conf); AWSCredentialProviderList list2 = createAWSCredentialProviderList( uri2, conf); assertCredentialProviders(expectedClasses, list1); assertCredentialProviders(expectedClasses, list2); } @Test public void testConfiguredChainUsesSharedInstanceProfile() throws Exception { URI uri1 = new URI("s3a://bucket1"), uri2 = new URI("s3a://bucket2"); Configuration conf = new Configuration(false); List<Class<?>> expectedClasses = Arrays.asList( InstanceProfileCredentialsProvider.class); conf.set(AWS_CREDENTIALS_PROVIDER, buildClassListString(expectedClasses)); AWSCredentialProviderList list1 = createAWSCredentialProviderList( uri1, conf); AWSCredentialProviderList list2 = 
createAWSCredentialProviderList( uri2, conf); assertCredentialProviders(expectedClasses, list1); assertCredentialProviders(expectedClasses, list2); } @Test public void testFallbackToDefaults() throws Throwable { // build up the base provider final AWSCredentialProviderList credentials = buildAWSProviderList( new URI("s3a://bucket1"), createProviderConfiguration(" "), ASSUMED_ROLE_CREDENTIALS_PROVIDER, Arrays.asList( EnvironmentVariableCredentialsProvider.class), Sets.newHashSet()); assertTrue(credentials.size() > 0, "empty credentials"); } /** * Test S3A credentials provider remapping with assumed role * credentials provider. */ @Test public void testAssumedRoleWithRemap() throws Throwable { Configuration conf = new Configuration(false); conf.set(ASSUMED_ROLE_CREDENTIALS_PROVIDER, "custom.assume.role.key1,custom.assume.role.key2,custom.assume.role.key3"); conf.set(AWS_CREDENTIALS_PROVIDER_MAPPING, "custom.assume.role.key1=" + CredentialProviderListFactory.ENVIRONMENT_CREDENTIALS_V2 + " ,custom.assume.role.key2 =" + CountInvocationsProvider.NAME + ", custom.assume.role.key3= " + CredentialProviderListFactory.PROFILE_CREDENTIALS_V1); final AWSCredentialProviderList credentials = buildAWSProviderList( new URI("s3a://bucket1"), conf, ASSUMED_ROLE_CREDENTIALS_PROVIDER, new ArrayList<>(), new HashSet<>()); Assertions .assertThat(credentials.size()) .describedAs("List of Credentials providers") .isEqualTo(3); } /** * Test S3A credentials provider remapping with aws * credentials provider. 
*/ @Test public void testAwsCredentialProvidersWithRemap() throws Throwable { Configuration conf = new Configuration(false); conf.set(AWS_CREDENTIALS_PROVIDER, "custom.aws.creds.key1,custom.aws.creds.key2,custom.aws.creds.key3,custom.aws.creds.key4"); conf.set(AWS_CREDENTIALS_PROVIDER_MAPPING, "custom.aws.creds.key1=" + CredentialProviderListFactory.ENVIRONMENT_CREDENTIALS_V2 + " ,\ncustom.aws.creds.key2=" + CountInvocationsProvider.NAME + "\n, custom.aws.creds.key3=" + CredentialProviderListFactory.PROFILE_CREDENTIALS_V1 + ",custom.aws.creds.key4 = " + CredentialProviderListFactory.PROFILE_CREDENTIALS_V2); final AWSCredentialProviderList credentials = buildAWSProviderList( new URI("s3a://bucket1"), conf, AWS_CREDENTIALS_PROVIDER, new ArrayList<>(), new HashSet<>()); Assertions .assertThat(credentials.size()) .describedAs("List of Credentials providers") .isEqualTo(4); } @Test public void testProviderConstructor() throws Throwable { final AWSCredentialProviderList list = new AWSCredentialProviderList("name", new AnonymousAWSCredentialsProvider(), new ErrorProvider(TESTFILE_URI, new Configuration())); Assertions.assertThat(list.getProviders()) .describedAs("provider list in %s", list) .hasSize(2); final AwsCredentials credentials = list.resolveCredentials(); Assertions.assertThat(credentials) .isInstanceOf(AwsBasicCredentials.class); assertCredentialResolution(credentials, null, null); } public static void assertCredentialResolution(AwsCredentials creds, String key, String secret) { Assertions.assertThat(creds.accessKeyId()) .describedAs("access key of %s", creds) .isEqualTo(key); Assertions.assertThat(creds.secretAccessKey()) .describedAs("secret key of %s", creds) .isEqualTo(secret); } private String buildClassList(Class... classes) { return Arrays.stream(classes) .map(Class::getCanonicalName) .collect(Collectors.joining(",")); } private String buildClassList(String... 
classes) { return Arrays.stream(classes) .collect(Collectors.joining(",")); } /** * A credential provider declared as abstract, so it cannot be instantiated. */ static abstract class AbstractProvider implements AwsCredentialsProvider { @Override public AwsCredentials resolveCredentials() { return null; } } /** * A credential provider whose constructor signature doesn't match. */ protected static class ConstructorSignatureErrorProvider extends AbstractProvider { @SuppressWarnings("unused") public ConstructorSignatureErrorProvider(String str) { } } /** * A credential provider whose constructor raises an NPE. */ protected static class ConstructorFailureProvider extends AbstractProvider { @SuppressWarnings("unused") public ConstructorFailureProvider() { throw new NullPointerException("oops"); } } @Test public void testAWSExceptionTranslation() throws Throwable { IOException ex = expectProviderInstantiationFailure( AWSExceptionRaisingFactory.class, AWSExceptionRaisingFactory.NO_AUTH); if (!(ex instanceof AccessDeniedException)) { throw ex; } } protected static class AWSExceptionRaisingFactory extends AbstractProvider { public static final String NO_AUTH = "No auth"; public static AwsCredentialsProvider create() { throw new NoAuthWithAWSException(NO_AUTH); } } @Test public void testFactoryWrongType() throws Throwable { expectProviderInstantiationFailure( FactoryOfWrongType.class, InstantiationIOException.CONSTRUCTOR_EXCEPTION); } static class FactoryOfWrongType extends AbstractProvider { public static final String NO_AUTH = "No auth"; public static String getInstance() { return "oops"; } @Override public AwsCredentials resolveCredentials() { return null; } } /** * Expect a provider to raise an exception on failure. * @param option aws provider option string. * @param expectedErrorText error text to expect * @return the exception raised * @throws Exception any unexpected exception thrown. 
*/ private IOException expectProviderInstantiationFailure(String option, String expectedErrorText) throws Exception { return intercept(IOException.class, expectedErrorText, () -> createAWSCredentialProviderList( TESTFILE_URI, createProviderConfiguration(option))); } /** * Expect a provider to raise an exception on failure. * @param aClass class to use * @param expectedErrorText error text to expect * @return the exception raised * @throws Exception any unexpected exception thrown. */ private IOException expectProviderInstantiationFailure(Class aClass, String expectedErrorText) throws Exception { return expectProviderInstantiationFailure( buildClassListString(Collections.singletonList(aClass)), expectedErrorText); } /** * Create a configuration with a specific provider. * @param providerOption option for the aws credential provider option. * @return a configuration to use in test cases */ private Configuration createProviderConfiguration( final String providerOption) { Configuration conf = new Configuration(false); conf.set(AWS_CREDENTIALS_PROVIDER, providerOption); return conf; } /** * Create a configuration with a specific class. * @param aClass class to use * @return a configuration to use in test cases */ public Configuration createProviderConfiguration(final Class<?> aClass) { return createProviderConfiguration(buildClassListString( Collections.singletonList(aClass))); } /** * Asserts expected provider classes in list. 
* @param expectedClasses expected provider classes * @param list providers to check */ private static void assertCredentialProviders( List<Class<?>> expectedClasses, AWSCredentialProviderList list) { assertNotNull(list); List<AwsCredentialsProvider> providers = list.getProviders(); Assertions.assertThat(providers) .describedAs("providers") .hasSize(expectedClasses.size()); for (int i = 0; i < expectedClasses.size(); ++i) { Class<?> expectedClass = expectedClasses.get(i); AwsCredentialsProvider provider = providers.get(i); assertNotNull(provider, String.format("At position %d, expected class is %s, but found null.", i, expectedClass)); assertTrue(expectedClass.isAssignableFrom(provider.getClass()), String.format("At position %d, expected class is %s, but found %s.", i, expectedClass, provider.getClass())); } } /** * This is here to check up on the S3ATestUtils probes themselves. * @see S3ATestUtils#authenticationContains(Configuration, String). */ @Test public void testAuthenticationContainsProbes() { Configuration conf = new Configuration(false); assertFalse(authenticationContains(conf, AssumedRoleCredentialProvider.NAME), "found AssumedRoleCredentialProvider"); conf.set(AWS_CREDENTIALS_PROVIDER, AssumedRoleCredentialProvider.NAME); assertTrue(authenticationContains(conf, AssumedRoleCredentialProvider.NAME), "didn't find AssumedRoleCredentialProvider"); } @Test public void testExceptionLogic() throws Throwable { AWSCredentialProviderList providers = new AWSCredentialProviderList(); // verify you can't get credentials from it NoAuthWithAWSException noAuth = intercept(NoAuthWithAWSException.class, AWSCredentialProviderList.NO_AWS_CREDENTIAL_PROVIDERS, () -> providers.resolveCredentials()); // but that it closes safely providers.close(); S3ARetryPolicy retryPolicy = new S3ARetryPolicy(new Configuration(false)); assertEquals(RetryPolicy.RetryAction.FAIL.action, retryPolicy.shouldRetry(noAuth, 0, 0, true).action, "Expected no retry on auth failure"); try { throw 
S3AUtils.translateException("login", "", noAuth); } catch (AccessDeniedException expected) { // this is what we want; other exceptions will be passed up assertEquals(RetryPolicy.RetryAction.FAIL.action, retryPolicy.shouldRetry(expected, 0, 0, true).action, "Expected no retry on AccessDeniedException"); } } @Test public void testRefCounting() throws Throwable { AWSCredentialProviderList providers = new AWSCredentialProviderList(); assertEquals(1, providers.getRefCount(), "Ref count for " + providers); AWSCredentialProviderList replicate = providers.share(); assertEquals(providers, replicate); assertEquals(2, providers.getRefCount(), "Ref count after replication for " + providers); assertFalse(providers.isClosed(), "Was closed " + providers); providers.close(); assertFalse(providers.isClosed(), "Was closed " + providers); assertEquals(1, providers.getRefCount(), "Ref count after close() for " + providers); // this should now close it providers.close(); assertTrue(providers.isClosed(), "Was not closed " + providers); assertEquals(0, providers.getRefCount(), "Ref count after close() for " + providers); assertEquals(0, providers.getRefCount(), "Ref count after second close() for " + providers); intercept(IllegalStateException.class, "closed", () -> providers.share()); // final call harmless providers.close(); assertEquals(0, providers.getRefCount(), "Ref count after close() for " + providers); intercept(NoAuthWithAWSException.class, AWSCredentialProviderList.CREDENTIALS_REQUESTED_WHEN_CLOSED, () -> providers.resolveCredentials()); } /** * Verify that IOEs are passed up without being wrapped. */ @Test public void testIOEInConstructorPropagation() throws Throwable { IOException expected = expectProviderInstantiationFailure( IOERaisingProvider.class.getName(), "expected"); if (!(expected instanceof InterruptedIOException)) { throw expected; } } /** * Credential provider which raises an IOE when constructed. 
*/ protected static class IOERaisingProvider extends AbstractProvider { public IOERaisingProvider(URI uri, Configuration conf) throws IOException { throw new InterruptedIOException("expected"); } } private static final AwsCredentials EXPECTED_CREDENTIALS = AwsBasicCredentials.create("expectedAccessKey", "expectedSecret"); /** * Credential provider that takes a long time. */ protected static class SlowProvider extends AbstractSessionCredentialsProvider { public SlowProvider(@Nullable URI uri, Configuration conf) { super(uri, conf); } @Override protected AwsCredentials createCredentials(Configuration config) throws IOException { // yield to other callers to induce race condition Thread.yield(); return EXPECTED_CREDENTIALS; } } private static final int CONCURRENT_THREADS = 4; @Test public void testConcurrentAuthentication() throws Throwable { Configuration conf = createProviderConfiguration(SlowProvider.class.getName()); Path testFile = getExternalData(conf); AWSCredentialProviderList list = createAWSCredentialProviderList(testFile.toUri(), conf); SlowProvider provider = (SlowProvider) list.getProviders().get(0); ExecutorService pool = Executors.newFixedThreadPool(CONCURRENT_THREADS); List<Future<AwsCredentials>> results = new ArrayList<>(); try { assertFalse(provider.isInitialized(), "Provider not initialized. isInitialized should be false"); assertFalse(provider.hasCredentials(), "Provider not initialized. hasCredentials should be false"); if (provider.getInitializationException() != null) { throw new AssertionError( "Provider not initialized. 
getInitializationException should return null", provider.getInitializationException()); } for (int i = 0; i < CONCURRENT_THREADS; i++) { results.add(pool.submit(() -> list.resolveCredentials())); } for (Future<AwsCredentials> result : results) { AwsCredentials credentials = result.get(); assertEquals("expectedAccessKey", credentials.accessKeyId(), "Access key from credential provider"); assertEquals("expectedSecret", credentials.secretAccessKey(), "Secret key from credential provider"); } } finally { pool.awaitTermination(TERMINATION_TIMEOUT, TimeUnit.SECONDS); pool.shutdown(); } assertTrue(provider.isInitialized(), "Provider initialized without errors. isInitialized should be true"); assertTrue(provider.hasCredentials(), "Provider initialized without errors. hasCredentials should be true"); if (provider.getInitializationException() != null) { throw new AssertionError( "Provider initialized without errors. getInitializationException should return null", provider.getInitializationException()); } } /** * Credential provider with error. */ protected static class ErrorProvider extends AbstractSessionCredentialsProvider { public ErrorProvider(@Nullable URI uri, Configuration conf) { super(uri, conf); } @Override protected AwsCredentials createCredentials(Configuration config) throws IOException { throw new IOException("expected error"); } } @Test public void testConcurrentAuthenticationError() throws Throwable { Configuration conf = createProviderConfiguration(ErrorProvider.class.getName()); Path testFile = getExternalData(conf); AWSCredentialProviderList list = createAWSCredentialProviderList(testFile.toUri(), conf); ErrorProvider provider = (ErrorProvider) list.getProviders().get(0); ExecutorService pool = Executors.newFixedThreadPool(CONCURRENT_THREADS); List<Future<AwsCredentials>> results = new ArrayList<>(); try { assertFalse(provider.isInitialized(), "Provider not initialized. 
isInitialized should be false"); assertFalse(provider.hasCredentials(), "Provider not initialized. hasCredentials should be false"); if (provider.getInitializationException() != null) { throw new AssertionError( "Provider not initialized. getInitializationException should return null", provider.getInitializationException()); } for (int i = 0; i < CONCURRENT_THREADS; i++) { results.add(pool.submit(() -> list.resolveCredentials())); } for (Future<AwsCredentials> result : results) { interceptFuture(CredentialInitializationException.class, "expected error", result ); } } finally { pool.awaitTermination(TERMINATION_TIMEOUT, TimeUnit.SECONDS); pool.shutdown(); } assertTrue(provider.isInitialized(), "Provider initialization failed. isInitialized should be true"); assertFalse(provider.hasCredentials(), "Provider initialization failed. hasCredentials should be false"); assertTrue(provider.getInitializationException(). getMessage().contains("expected error"), "Provider initialization failed. " + "getInitializationException should contain the error"); } /** * V2 Credentials whose factory method raises ClassNotFoundException. * This will fall back to an attempted v1 load which will fail because it * is the wrong type. * The exception raised will be from the v2 instantiation attempt, * not the v1 attempt. */ @Test public void testV2ClassNotFound() throws Throwable { InstantiationIOException expected = intercept(InstantiationIOException.class, "simulated v2 CNFE", () -> createAWSCredentialProviderList( TESTFILE_URI, createProviderConfiguration(V2CredentialProviderDoesNotInstantiate.class.getName()))); // print for the curious LOG.info("{}", expected.toString()); } /** * Tests for the string utility that will be used by S3A credentials provider. 
*/ @Test public void testStringCollectionSplitByEqualsSuccess() { final Configuration configuration = new Configuration(false); configuration.set("custom_key", ""); Map<String, String> splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "custom_key"); Assertions .assertThat(splitMap) .describedAs( "Map of key value pairs derived from config, split by equals(=) and comma(,)") .hasSize(0); splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "not_present"); Assertions .assertThat(splitMap) .describedAs( "Map of key value pairs derived from config, split by equals(=) and comma(,)") .hasSize(0); configuration.set("custom_key", "element.first.key1 = element.first.val1"); splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "custom_key"); Assertions .assertThat(splitMap) .describedAs( "Map of key value pairs derived from config, split by equals(=) and comma(,)") .hasSize(1) .containsEntry("element.first.key1", "element.first.val1"); configuration.set("custom_key", "element.xyz.key1 =element.abc.val1 , element.xyz.key2= element.abc.val2"); splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "custom_key"); Assertions .assertThat(splitMap) .describedAs( "Map of key value pairs derived from config, split by equals(=) and comma(,)") .hasSize(2) .containsEntry("element.xyz.key1", "element.abc.val1") .containsEntry("element.xyz.key2", "element.abc.val2"); configuration.set("custom_key", "\nelement.xyz.key1 =element.abc.val1 \n" + ", element.xyz.key2=element.abc.val2,element.xyz.key3=element.abc.val3" + " , element.xyz.key4 =element.abc.val4,element.xyz.key5= " + "element.abc.val5 ,\n \n \n " + " element.xyz.key6 = element.abc.val6 \n , \n" + "element.xyz.key7=element.abc.val7,\n"); splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals(configuration, "custom_key"); Assertions .assertThat(splitMap) .describedAs( "Map of key value pairs derived from config, split by equals(=) and 
comma(,)") .hasSize(7) .containsEntry("element.xyz.key1", "element.abc.val1") .containsEntry("element.xyz.key2", "element.abc.val2") .containsEntry("element.xyz.key3", "element.abc.val3") .containsEntry("element.xyz.key4", "element.abc.val4") .containsEntry("element.xyz.key5", "element.abc.val5") .containsEntry("element.xyz.key6", "element.abc.val6") .containsEntry("element.xyz.key7", "element.abc.val7"); configuration.set("custom_key", "element.first.key1 = element.first.val2 ,element.first.key1 =element.first.val1"); splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "custom_key"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(1) .containsEntry("element.first.key1", "element.first.val1"); configuration.set("custom_key", ",,, , ,, ,element.first.key1 = element.first.val2 ," + "element.first.key1 = element.first.val1 , ,,, ,"); splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "custom_key"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(1) .containsEntry("element.first.key1", "element.first.val1"); configuration.set("custom_key", ",, , , ,, ,"); splitMap = S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "custom_key"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(0); } /** * Validates that the argument provided is invalid by intercepting the expected * Exception. * * @param propKey The property key to validate. * @throws Exception If any error occurs. 
*/ private static void expectInvalidArgument(final String propKey) throws Exception { final Configuration configuration = new Configuration(false); configuration.set("custom_key", propKey); intercept( IllegalArgumentException.class, STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG, () -> S3AUtils.getTrimmedStringCollectionSplitByEquals( configuration, "custom_key")); } /** * Tests for the string utility that will be used by S3A credentials provider. */ @Test public void testStringCollectionSplitByEqualsFailure() throws Exception { expectInvalidArgument(" = element.abc.val1"); expectInvalidArgument("=element.abc.val1"); expectInvalidArgument("= element.abc.val1"); expectInvalidArgument(" =element.abc.val1"); expectInvalidArgument("element.abc.key1="); expectInvalidArgument("element.abc.key1= "); expectInvalidArgument("element.abc.key1 ="); expectInvalidArgument("element.abc.key1 = "); expectInvalidArgument("="); expectInvalidArgument(" ="); expectInvalidArgument("= "); expectInvalidArgument(" = "); expectInvalidArgument("== = = ="); expectInvalidArgument(", = "); } /** * V2 credentials which raises an instantiation exception in * the factory method. */ public static final class V2CredentialProviderDoesNotInstantiate extends AbstractProvider { private V2CredentialProviderDoesNotInstantiate() { } public static AwsCredentialsProvider create() throws ClassNotFoundException { throw new ClassNotFoundException("simulated v2 CNFE"); } } }
apache/trafodion
35,264
core/sql/src/main/java/org/trafodion/sql/HDFSClient.java
// @@@ START COPYRIGHT @@@ // // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // // @@@ END COPYRIGHT @@@ package org.trafodion.sql; import java.io.IOException; import java.io.FileNotFoundException; import java.io.EOFException; import java.io.OutputStream; import java.io.InputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.apache.log4j.PropertyConfigurator; import org.apache.log4j.Logger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.io.compress.CompressionInputStream; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; import java.io.EOFException; import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.concurrent.Executors; import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutionException; import 
java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.io.compress.CompressionCodecFactory; // // To read a range in a Hdfs file, use the constructor // public HDFSClient(int bufNo, int rangeNo, String filename, ByteBuffer buffer, long position, int length, CompressionInputStream inStream) // // For instance methods like hdfsListDirectory use the constructor // public HDFSClient() // // For all static methods use // HDFSClient::<static_method_name> // public class HDFSClient { // Keep the constants and string array below in sync with // enum CompressionMethod at sql/comexe/ComCompressionInfo.h static final short UNKNOWN_COMPRESSION = 0; static final short UNCOMPRESSED = 1; static final short LZOP = 5; static final String COMPRESSION_TYPE[] = { "UNKNOWN_COMPRESSION", // unable to determine compression method "UNCOMPRESSED", // file is not compressed "LZO_DEFLATE", // using LZO deflate compression "DEFLATE", // using DEFLATE compression "GZIP", // using GZIP compression "LZOP"}; // using LZOP compression static Logger logger_ = Logger.getLogger(HDFSClient.class.getName()); private static Configuration config_ = null; private static ExecutorService executorService_ = null; private static FileSystem defaultFs_ = null; private static CompressionCodecFactory codecFactory_ = null; private static boolean alluxioNotInstalled_ = false; private FileSystem fs_ = null; private int bufNo_; private int rangeNo_; private FSDataInputStream fsdis_; CompressionInputStream inStream_; private OutputStream outStream_; private String filename_; private ByteBuffer buf_; private ByteBuffer savedBuf_; private byte[] bufArray_; private int 
bufLen_; private int bufOffset_ = 0; private long pos_ = 0; private int len_ = 0; private int lenRemain_ = 0; private int blockSize_; private int bytesRead_; private Future future_ = null; private int isEOF_ = 0; private int totalBytesWritten_ = 0; private Path filepath_ = null; boolean compressed_ = false; private CompressionCodec codec_ = null; private short compressionType_; private int ioByteArraySizeInKB_; private boolean sequenceFile_; private byte recDelimiter_; private Writable key_; private Writable value_; private SequenceFile.Reader reader_; private SequenceFile.CompressionType seqCompressionType_; static { System.setProperty("hostName", System.getenv("HOSTNAME")); String confFile = System.getProperty("trafodion.log4j.configFile"); System.setProperty("trafodion.root", System.getenv("TRAF_HOME")); if (confFile == null) { confFile = System.getenv("TRAF_CONF") + "/log4j.sql.config"; } PropertyConfigurator.configure(confFile); config_ = TrafConfiguration.create(TrafConfiguration.HDFS_CONF); executorService_ = Executors.newCachedThreadPool(); try { defaultFs_ = FileSystem.get(config_); } catch (IOException ioe) { throw new RuntimeException("Exception in HDFSClient static block", ioe); } try { boolean alluxioFs = defaultFs_ instanceof alluxio.hadoop.FileSystem; } catch (Throwable rte) { // Ignore the exception. 
It is not needed for alluxio to be installed // for the methods of this class to work if // alluxio filesystem is NOT required alluxioNotInstalled_ = true; } codecFactory_ = new CompressionCodecFactory(config_); System.loadLibrary("executor"); } // The object instance that runs in the threadpool to read // the requested chunk in the range // FSDataInputStream.read method may not read the requested length in one shot // Loop to read the requested length or EOF is reached // Requested length can never be larger than the buffer size class HDFSRead implements Callable { HDFSRead() { } public Object call() throws IOException { int bytesRead; int totalBytesRead = 0; if (sequenceFile_) { // do nothing ; } else if (compressed_) { bufArray_ = new byte[ioByteArraySizeInKB_ * 1024]; } else { // alluxio doesn't support direct ByteBuffer reads // Hence, create a non-direct ByteBuffer, read into // byteArray backing up this ByteBuffer and // then copy the data read to direct ByteBuffer for the // native layer to process the data if ((! alluxioNotInstalled_) && fs_ instanceof alluxio.hadoop.FileSystem) { savedBuf_ = buf_; buf_ = ByteBuffer.allocate(savedBuf_.capacity()); } if (! buf_.hasArray()) { try { fsdis_.seek(pos_); } catch (EOFException e) { isEOF_ = 1; return new Integer(totalBytesRead); } } } do { if (sequenceFile_) bytesRead = sequenceFileRead(lenRemain_); else if (compressed_) bytesRead = compressedFileRead(lenRemain_); else { if (buf_.hasArray()) bytesRead = fsdis_.read(pos_, buf_.array(), bufOffset_, lenRemain_); else bytesRead = fsdis_.read(buf_); } if (bytesRead == -1) { isEOF_ = 1; break; } if (bytesRead == 0) break; totalBytesRead += bytesRead; if (totalBytesRead == bufLen_) break; bufOffset_ += bytesRead; pos_ += bytesRead; lenRemain_ -= bytesRead; } while (lenRemain_ > 0); if ((! 
alluxioNotInstalled_) && fs_ instanceof alluxio.hadoop.FileSystem) { if (totalBytesRead > 0) { byte[] temp = buf_.array(); savedBuf_.put(temp, 0, totalBytesRead); } } return new Integer(totalBytesRead); } } int compressedFileRead(int readLenRemain) throws IOException { int totalReadLen = 0; int readLen; int offset = 0; int retcode; int lenRemain = ((readLenRemain > bufArray_.length) ? bufArray_.length : readLenRemain); do { readLen = inStream_.read(bufArray_, offset, lenRemain); if (readLen == -1 || readLen == 0) break; totalReadLen += readLen; offset += readLen; lenRemain -= readLen; } while (lenRemain > 0); if (totalReadLen > 0) { if ((retcode = copyToByteBuffer(buf_, bufOffset_, bufArray_, totalReadLen)) != 0) throw new IOException("Failure to copy to the DirectByteBuffer in the native layer with error code " + retcode); } else totalReadLen = -1; return totalReadLen; } /* Trafodion adds record delimiter '\n' while copying it to buffer backing up the ByteBuffer */ int sequenceFileRead(int readLenRemain) throws IOException { boolean eof = false; boolean nextValue; byte[] byteArray; int readLen; int totalReadLen = 0; long tempPos; int lenRemain = readLenRemain; while (!eof && lenRemain > 0) { try { tempPos = reader_.getPosition(); nextValue = reader_.next(key_, value_); if (!nextValue) { eof = true; break; } } catch (java.io.EOFException e) { eof = true; break; } byteArray = ((Text)value_).getBytes(); readLen = ((Text)value_).getLength(); if ((readLen+1) <= lenRemain) { buf_.put(byteArray, 0, readLen); buf_.put(recDelimiter_); lenRemain -= (readLen+1); totalReadLen += (readLen+1); } else { // Reset the position because the row can't be copied to buffer try { reader_.sync(tempPos); } catch (java.io.EOFException e1) {} break; } } if (totalReadLen == 0) totalReadLen = -1; return totalReadLen; } native int copyToByteBuffer(ByteBuffer buf, int bufOffset, byte[] bufArray, int copyLen); public HDFSClient() { } // This constructor enables the hdfs data to be read in 
another thread while the previously
// read buffer is being processed by the SQL engine
// Opens the file and hands over the needed info to HdfsRead instance to read
// The passed in length can never be more than the size of the buffer
// If the range has a length more than the buffer length, the range is chunked
// in HdfsScan

   /**
    * Sets up a ranged read of an HDFS file and, when there is anything to read,
    * submits the first asynchronous buffer fill (HDFSRead) to the shared executor.
    *
    * @param bufNo              index of the buffer being filled (double buffering)
    * @param ioByteArraySizeInKB size of the intermediate byte array used for compressed reads
    * @param rangeNo            range number within HdfsScan
    * @param filename           HDFS file to read
    * @param buffer             destination buffer (array-backed or direct)
    * @param position           starting offset of the range within the file
    * @param length             length of the range; chunked by the caller to at most the buffer size
    * @param compressionType    expected compression type (see COMPRESSION_TYPE)
    * @param sequenceFile       true when the file is a Hadoop SequenceFile
    * @param recDelimiter       record delimiter byte
    * @param inStream           previously opened compressed input stream to reuse, or null
    * @throws IOException when the file cannot be opened or the needed codec is not configured
    */
   public HDFSClient(int bufNo, int ioByteArraySizeInKB, int rangeNo, String filename,
                     ByteBuffer buffer, long position, int length, short compressionType,
                     boolean sequenceFile, byte recDelimiter,
                     CompressionInputStream inStream) throws IOException, EOFException
   {
      bufNo_ = bufNo;
      rangeNo_ = rangeNo;
      filename_ = filename;
      ioByteArraySizeInKB_ = ioByteArraySizeInKB;
      filepath_ = new Path(filename_);
      fs_ = FileSystem.get(filepath_.toUri(), config_);
      compressionType_ = compressionType;
      inStream_ = inStream;
      sequenceFile_ = sequenceFile;
      recDelimiter_ = recDelimiter;
      if (sequenceFile_) {
         // SequenceFiles are read via SequenceFile.Reader, not a raw stream
         fsdis_ = null;
         inStream_ = null;
      } else {
         codec_ = codecFactory_.getCodec(filepath_);
         if (codec_ != null) {
            compressed_ = true;
            if (inStream_ == null)
               inStream_ = codec_.createInputStream(fs_.open(filepath_));
         } else {
            // No codec found for the file suffix: fail fast if the caller expected one
            if ((compressionType_ != UNCOMPRESSED) && (compressionType_ != UNKNOWN_COMPRESSION))
               throw new IOException(COMPRESSION_TYPE[compressionType_] +
                                     " compression codec is not configured in Hadoop");
            if (filename_.endsWith(".lzo"))
               throw new IOException(COMPRESSION_TYPE[LZOP] +
                                     " compression codec is not configured in Hadoop");
            fsdis_ = fs_.open(filepath_);
         }
      }
      blockSize_ = (int)fs_.getDefaultBlockSize(filepath_);
      buf_ = buffer;
      bufOffset_ = 0;
      pos_ = position;
      len_ = length;
      if (buffer.hasArray())
         bufLen_ = buffer.array().length;
      else {
         bufLen_ = buffer.capacity();
         buf_.position(0);
      }
      // Read at most one buffer's worth; HdfsScan chunks longer ranges
      lenRemain_ = (len_ > bufLen_) ? bufLen_ : len_;
      if (lenRemain_ != 0) {
         if (sequenceFile_)
            initSequenceFileRead();
         future_ = executorService_.submit(new HDFSRead());
      }
   }

   /*
    * Trafodion supports SequenceFiles whose key was written via BytesWritable
    * or Text and whose value was written via Text. The key is completely
    * ignored while reading rows. Columns in the value are delimited by the
    * column delimiter 001 (octal).
    */
   public void initSequenceFileRead() throws IOException, EOFException
   {
      SequenceFile.Reader.Option seqLen = SequenceFile.Reader.length(lenRemain_);
      SequenceFile.Reader.Option seqFileName = SequenceFile.Reader.file(filepath_);
      reader_ = new SequenceFile.Reader(config_, seqLen, seqFileName);
      seqCompressionType_ = reader_.getCompressionType();
      if (seqCompressionType_ == SequenceFile.CompressionType.NONE)
         compressed_ = false;
      else
         compressed_ = true;
      // Compressed SequenceFiles cannot be split: only position 0 is readable
      if (compressed_ && pos_ != 0)
         throw new IOException("Spliting of compressed sequence file is not supported");
      try {
         // Position the reader at the first sync marker at or after pos_
         reader_.sync(pos_);
      } catch (EOFException e) {}
      String keyClass = reader_.getKeyClassName();
      String valueClass = reader_.getValueClassName();
      if (! valueClass.equals("org.apache.hadoop.io.Text"))
         throw new IOException("Sequence File with the value class of type " +
                               valueClass + " is not supported");
      if (!(keyClass.equals("org.apache.hadoop.io.Text") ||
            keyClass.equals("org.apache.hadoop.io.BytesWritable")))
         throw new IOException("Sequence File with the key class of type " +
                               keyClass + " is not supported");
      key_ = (Writable) ReflectionUtils.newInstance(reader_.getKeyClass(), config_);
      value_ = (Writable) ReflectionUtils.newInstance(reader_.getValueClass(), config_);
   }

   // This method waits for the read to complete. Read can complete due to one of the following
   // a) buffer is full
   // b) EOF is reached
   // c) An exception is encountered while reading the file
   /**
    * Blocks until the asynchronous HDFSRead submitted by the constructor finishes.
    *
    * @return number of bytes placed in the buffer
    * @throws ExecutionException wrapping any IOException hit by the reader thread
    */
   public int trafHdfsReadBuffer() throws IOException, InterruptedException, ExecutionException
   {
      int bytesRead = ((Integer)future_.get()).intValue();
      if (! compressed_ && fsdis_ != null)
         fsdis_.close();
      fsdis_ = null;
      return bytesRead;
   }

   public int getRangeNo()
   {
      return rangeNo_;
   }

   public int isEOF()
   {
      return isEOF_;
   }

   /**
    * Creates (or prepares for append) an HDFS file. When compression is
    * requested and the name lacks a ".gz" suffix, one is added.
    *
    * @return true always; failures raise IOException
    */
   boolean hdfsCreate(String fname, boolean overwrite, boolean append, boolean compress)
         throws IOException
   {
      boolean fileExists = false;
      filename_ = fname;
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsCreate() - started" );
      if (!compress || fname.endsWith(".gz"))
         filepath_ = new Path(fname);
      else
         filepath_ = new Path(fname + ".gz");
      fs_ = FileSystem.get(filepath_.toUri(), config_);
      compressed_ = compress;
      fsdis_ = null;
      if (fs_.exists(filepath_)) {
         if (overwrite)
            fs_.delete(filepath_, true);  // two-arg form; single-arg delete(Path) is deprecated
         else if (!append)
            throw new IOException(filepath_ + " already exists");
         else
            fileExists = true;
      }
      FSDataOutputStream fsOut = null;
      if (!fileExists) {
         // Create an empty file now; data is written later via hdfsWrite()
         fsOut = fs_.create(filepath_);
         fsOut.close();
      }
      return true;
   }

   /**
    * Remembers the target path for subsequent hdfsWrite() calls without
    * touching the file system.
    */
   boolean hdfsOpen(String fname, boolean compress) throws IOException
   {
      filename_ = fname;
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsOpen() - started" );
      if (!compress || fname.endsWith(".gz"))
         filepath_ = new Path(fname);
      else
         filepath_ = new Path(fname + ".gz");
      fs_ = FileSystem.get(filepath_.toUri(), config_);
      compressed_ = compress;
      outStream_ = null;
      fsdis_ = null;
      return true;
   }

   /** @return file length, 0 when the file does not exist, -1 when the path is not a plain file */
   long hdfsSize() throws IOException
   {
      FileStatus filestatus;
      try {
         filestatus = fs_.getFileStatus(filepath_);
      } catch (java.io.FileNotFoundException e) {
         return 0;
      }
      if (filestatus.isFile())
         return filestatus.getLen();
      else
         return -1;
   }

   /**
    * Appends (or creates and writes) the given bytes and returns the offset at
    * which they were written. With doRetry, the write is re-attempted up to 3
    * times when a concurrent writer made the observed offsets inconsistent.
    *
    * @return offset of the start of this write within the file
    */
   long hdfsWriteImmediate(byte[] buff, boolean doRetry) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsWriteImmediate() - started" );
      FSDataOutputStream fsOut = null;
      FileStatus filestatus = null;
      long writeOffset = 0;
      long writeEndOffset = 0;
      int trycount = 0;
      while (trycount < 3) {
         try {
            if (fs_.exists(filepath_)) {
               filestatus = fs_.getFileStatus(filepath_);
               fsOut = fs_.append(filepath_);
               writeOffset = filestatus.getLen();
            } else {
               fsOut = fs_.create(filepath_);
               writeOffset = 0;
            }
            fsOut.write(buff);
            fsOut.hflush();
            writeEndOffset = fsOut.getPos();
         } finally {
            if (fsOut != null)
               fsOut.close();
         }
         // Sanity check: if another writer appended concurrently, the span we
         // wrote will not match buff.length and writeOffset is unreliable.
         if (doRetry && (buff.length != writeEndOffset - writeOffset)) {
            trycount++;
            if (trycount == 3)
               throw new IOException("HDFSwrite did not succeed due to multiple concurrent writes ");
         }
         else
            trycount = 100; // doRetry is false or writeOffset is consistent: exit loop
      }
      return writeOffset;
   }

   /**
    * Appends bytes to the file selected by hdfsOpen()/hdfsCreate(), lazily
    * creating the (optionally gzip-compressed) output stream on first use.
    *
    * @return number of bytes written (buff.length)
    */
   int hdfsWrite(byte[] buff) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsWrite() - started" );
      FSDataOutputStream fsOut;
      if (outStream_ == null) {
         if (fs_.exists(filepath_))
            fsOut = fs_.append(filepath_);
         else
            fsOut = fs_.create(filepath_);
         if (compressed_) {
            GzipCodec gzipCodec = (GzipCodec) ReflectionUtils.newInstance(GzipCodec.class, config_);
            Compressor gzipCompressor = CodecPool.getCompressor(gzipCodec);
            outStream_ = gzipCodec.createOutputStream(fsOut, gzipCompressor);
         }
         else
            outStream_ = fsOut;
         if (logger_.isDebugEnabled())
            logger_.debug("HDFSClient.hdfsWrite() - output stream created" );
      }
      outStream_.write(buff);
      // Only an uncompressed stream can be hsync'ed; compressed data is flushed on close
      if (outStream_ instanceof FSDataOutputStream)
         ((FSDataOutputStream)outStream_).hsync();
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsWrite() - bytes written " + buff.length);
      return buff.length;
   }

   /**
    * Synchronously reads from the current file into the given buffer.
    *
    * @param pos    starting offset, or -1 to continue from the current position;
    *               compressed files may only be read from position 0
    * @param buffer destination; array-backed or direct
    * @return total bytes read (0 when the file does not exist)
    */
   int hdfsRead(long pos, ByteBuffer buffer) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsRead() - started" );
      if (fsdis_ == null && inStream_ == null) {
         try {
            codec_ = codecFactory_.getCodec(filepath_);
            if (codec_ != null) {
               compressed_ = true;
               inStream_ = codec_.createInputStream(fs_.open(filepath_));
            }
            else
               fsdis_ = fs_.open(filepath_);
         } catch (java.io.FileNotFoundException e) {
            return 0;
         }
      }
      int lenRemain;
      int bytesRead;
      int totalBytesRead = 0;
      int bufLen;
      int bufOffset = 0;
      if (compressed_) {
         if (pos != 0 && pos != -1)
            throw new IOException("Compressed files cannot be read from a non-zero position");
         else
            pos_ = 0;
      }
      else if (pos != -1)
         pos_ = pos;
      // FIX: was "bufArray_ != null", which never allocated the scratch array
      // when it was actually needed (NPE in compressedFileRead) and
      // re-allocated it when it already existed.
      if (compressed_ && bufArray_ == null)
         bufArray_ = new byte[ioByteArraySizeInKB_ * 1024];
      if (buffer.hasArray())
         bufLen = buffer.array().length;
      else {
         // NOTE(review): for a compressed file with a direct buffer, fsdis_ is
         // null here and seek would NPE — assumes callers pass array-backed
         // buffers for compressed reads; confirm against callers.
         if (pos_ != -1)
            fsdis_.seek(pos_);
         bufLen = buffer.capacity();
      }
      lenRemain = bufLen;
      do {
         if (compressed_) {
            bytesRead = compressedFileRead(lenRemain);
         }
         else {
            if (buffer.hasArray())
               bytesRead = fsdis_.read(pos_, buffer.array(), bufOffset, lenRemain);
            else
               bytesRead = fsdis_.read(buffer);
         }
         if (bytesRead == -1 || bytesRead == 0)
            break;
         totalBytesRead += bytesRead;
         pos_ += bytesRead;
         bufOffset += bytesRead;  // FIX: advance the array offset; a short read
                                  // previously clobbered already-read bytes
         lenRemain -= bytesRead;
      } while (lenRemain > 0);
      return totalBytesRead;
   }

   /** Closes any open output and input streams for this client. */
   boolean hdfsClose() throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsClose() - started" );
      if (outStream_ != null) {
         outStream_.close();
         outStream_ = null;
      }
      if (fsdis_ != null)
         fsdis_.close();
      return true;
   }

   /** @return file length, 0 when missing, -1 when not a plain file */
   static long hdfsSize(String filename) throws IOException
   {
      Path filepath = new Path(filename);
      FileSystem fs = FileSystem.get(filepath.toUri(), config_);
      FileStatus filestatus;
      try {
         filestatus = fs.getFileStatus(filepath);
      } catch (java.io.FileNotFoundException e) {
         return 0;
      }
      if (filestatus.isFile())
         return filestatus.getLen();
      else
         return -1;
   }

   /**
    * Merges every file under srcPathStr into the single file dstPathStr,
    * going through a "tmp" staging subdirectory.
    */
   public static boolean hdfsMergeFiles(String srcPathStr, String dstPathStr) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsMergeFiles() - start");
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsMergeFiles() - source Path: " + srcPathStr +
                       ", destination File:" + dstPathStr );
      Path srcPath = new Path(srcPathStr );
      srcPath = srcPath.makeQualified(srcPath.toUri(), null);
      FileSystem srcFs = FileSystem.get(srcPath.toUri(), config_);
      Path dstPath = new Path(dstPathStr);
      dstPath = dstPath.makeQualified(dstPath.toUri(), null);
      FileSystem dstFs = FileSystem.get(dstPath.toUri(), config_);
      if (dstFs.exists(dstPath)) {
         if (logger_.isDebugEnabled())
            logger_.debug("HDFSClient.hdfsMergeFiles() - destination files exists" );
         // for this prototype we just delete the file -- will change in next code drops
         dstFs.delete(dstPath, false);
         // The caller should already have checked existence of file -- throw exception
         //throw new FileAlreadyExistsException(dstPath.toString());
      }
      Path tmpSrcPath = new Path(srcPath, "tmp");
      FileSystem.mkdirs(srcFs, tmpSrcPath, srcFs.getFileStatus(srcPath).getPermission());
      logger_.debug("HDFSClient.hdfsMergeFiles() - tmp folder created." );
      // NOTE(review): the listing below includes the just-created "tmp" dir
      // itself; renaming it into itself is assumed to be a harmless no-op on
      // HDFS — confirm.
      Path[] files = FileUtil.stat2Paths(srcFs.listStatus(srcPath));
      for (Path f : files) {
         srcFs.rename(f, tmpSrcPath);
      }
      // copyMerge and use false for the delete option since it removes the whole directory
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsMergeFiles() - copyMerge" );
      FileUtil.copyMerge(srcFs, tmpSrcPath, dstFs, dstPath, false, config_, null);
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsMergeFiles() - delete intermediate files" );
      srcFs.delete(tmpSrcPath, true);
      return true;
   }

   /** Deletes every direct child of the unload directory; the directory itself is kept. */
   public static boolean hdfsCleanUnloadPath(String uldPathStr
                                             /*, boolean checkExistence, String mergeFileStr*/)
         throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsCleanUnloadPath() - unload Path: " + uldPathStr );
      Path uldPath = new Path(uldPathStr );
      FileSystem fs = FileSystem.get(uldPath.toUri(), config_);
      if (!fs.exists(uldPath)) {
         // unload location does not exist; hdfsCreate will create it later -- nothing to do
         return true;
      }
      Path[] files = FileUtil.stat2Paths(fs.listStatus(uldPath));
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsCleanUnloadPath() - delete files" );
      for (Path f : files) {
         fs.delete(f, false);
      }
      return true;
   }

   public static boolean hdfsExists(String filePathStr) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsExists() - Path: " + filePathStr);
      Path filePath = new Path(filePathStr );
      FileSystem fs = FileSystem.get(filePath.toUri(), config_);
      if (fs.exists(filePath))
         return true;
      return false;
   }

   /** Recursively deletes the given path. */
   public static boolean hdfsDeletePath(String pathStr) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsDeletePath() - start - Path: " + pathStr);
      Path delPath = new Path(pathStr );
      FileSystem fs = FileSystem.get(delPath.toUri(), config_);
      fs.delete(delPath, true);
      return true;
   }

   /**
    * Deletes every plain file directly under dirPathStr whose name starts
    * with startingFileName.
    *
    * @throws IOException when dirPathStr is not a directory
    */
   public static boolean hdfsDeleteFiles(String dirPathStr, String startingFileName)
         throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsDeleteFiles(" + dirPathStr + ", " + startingFileName + ")");
      Path dirPath = new Path(dirPathStr );
      FileSystem fs = FileSystem.get(dirPath.toUri(), config_);
      FileStatus[] fileStatus;
      if (fs.isDirectory(dirPath))
         fileStatus = fs.listStatus(dirPath);
      else
         throw new IOException("The path " + dirPath + " is not a directory");
      FileStatus aFileStatus;
      if (fileStatus != null) {
         for (int i = 0; i < fileStatus.length; i++) {
            aFileStatus = fileStatus[i];
            if (! aFileStatus.isDirectory()) {
               String pathName = aFileStatus.getPath().toString();
               // NOTE(review): dirPathStr is used as a regex here; a path with
               // regex metacharacters would mis-split — confirm inputs.
               String filenameParts[] = pathName.split(dirPathStr);
               if (filenameParts.length == 2 && filenameParts[1].startsWith(startingFileName))
                  fs.delete(aFileStatus.getPath(), true);  // two-arg form; one-arg is deprecated
            }
         }
      }
      return true;
   }

   /**
    * Streams the status of every entry under pathStr back to the native layer
    * via sendFileStatus.
    *
    * @return number of entries sent
    */
   public int hdfsListDirectory(String pathStr, long hdfsClientJniObj) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsListDirectory() - start - Path: " + pathStr);
      Path listPath = new Path(pathStr );
      FileSystem fs = FileSystem.get(listPath.toUri(), config_);
      FileStatus[] fileStatus;
      if (fs.isDirectory(listPath))
         fileStatus = fs.listStatus(listPath);
      else
         throw new IOException("The path " + listPath + " is not a directory");
      FileStatus aFileStatus;
      int retcode;
      if (fileStatus != null) {
         for (int i = 0; i < fileStatus.length; i++) {
            aFileStatus = fileStatus[i];
            retcode = sendFileStatus(hdfsClientJniObj, fileStatus.length,
                                     i, aFileStatus.isDirectory(),
                                     aFileStatus.getPath().toString(),
                                     aFileStatus.getModificationTime(),
                                     aFileStatus.getLen(),
                                     aFileStatus.getReplication(),
                                     aFileStatus.getBlockSize(),
                                     aFileStatus.getOwner(),
                                     aFileStatus.getGroup(),
                                     aFileStatus.getPermission().toShort(),
                                     aFileStatus.getAccessTime());
            if (retcode != 0)
               throw new IOException("Error " + retcode +
                      " while sending the file status info for file " +
                      aFileStatus.getPath().toString());
         }
         return fileStatus.length;
      }
      else
         return 0;
   }

   /**
    * Waits briefly for any in-flight asynchronous read and cancels it on
    * timeout or interrupt.
    */
   public void stop() throws IOException
   {
      if (future_ != null) {
         try {
            future_.get(30, TimeUnit.SECONDS);
         } catch (TimeoutException e) {
            logger_.error("Asynchronous Thread of HdfsScan is Cancelled (timeout), ", e);
            future_.cancel(true);
         } catch (InterruptedException e) {
            logger_.error("Asynchronous Thread of HdfsScan is Cancelled (interrupt), ", e);
            future_.cancel(true); // Interrupt the thread
         } catch (ExecutionException ee) {
            // read already failed; the error surfaces via trafHdfsReadBuffer()
         }
         future_ = null;
      }
   }

   public static void shutdown() throws InterruptedException
   {
      // FIX: shutdown() must be initiated BEFORE awaitTermination(); the old
      // order waited on a pool that had not been asked to stop, so the wait
      // always timed out.
      executorService_.shutdown();
      executorService_.awaitTermination(100, TimeUnit.MILLISECONDS);
   }

   private static FileSystem getFileSystem() throws IOException
   {
      return defaultFs_;
   }

   // if levelDeep = 0, return the max modification timestamp of the passed-in HDFS URIs
   // (a tab-separated list of 0 or more paths)
   // if levelDeep > 0, also check all directories "levelDeep" levels below. Exclude
   // directories that start with a dot (hidden directories)
   public static long getHiveTableMaxModificationTs(String stableDirPaths, int levelDeep)
         throws FileNotFoundException, IOException
   {
      long result = 0;
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient:getHiveTableMaxModificationTs enter");
      String[] tableDirPaths = stableDirPaths.split("\t");
      // account for root dir
      for (int i = 0; i < tableDirPaths.length; i++) {
         FileStatus r = getFileSystem().getFileStatus(new Path(tableDirPaths[i])); // super fast API, return in .2ms
         if (r != null && r.getModificationTime() > result)
            result = r.getModificationTime();
      }
      if (levelDeep > 0) {
         Path[] paths = new Path[tableDirPaths.length];
         for (int i = 0; i < tableDirPaths.length; i++)
            paths[i] = new Path(tableDirPaths[i]);
         long l = getHiveTableMaxModificationTs2(paths, levelDeep);
         if (l > result)
            result = l;
      }
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient:getHiveTableMaxModificationTs " + stableDirPaths +
                       " levelDeep" + levelDeep + ":" + result);
      return result;
   }

   /** Recursive helper: max modification ts over the given dirs down to levelDeep levels. */
   private static long getHiveTableMaxModificationTs2(Path[] paths, int levelDeep)
         throws FileNotFoundException, IOException
   {
      long result = 0;
      PathFilter filter = new PathFilter() {
         public boolean accept(Path file) {
            return !file.getName().startsWith("."); // filter out hidden files and directories
         }
      };
      FileStatus[] fileStatuss = null;
      if (levelDeep == 1) { // stop condition on recursive function
         // check parent level (important for deletes):
         for (Path path : paths) {
            FileStatus r = getFileSystem().getFileStatus(path); // super fast API, return in .2ms
            if (r != null && r.getModificationTime() > result)
               result = r.getModificationTime();
         }
         if (paths.length == 1)
            fileStatuss = getFileSystem().listStatus(paths[0], filter); // minor optimization. avoid using list based API when not needed
         else
            fileStatuss = getFileSystem().listStatus(paths, filter);
         for (int i = 0; i < fileStatuss.length; i++)
            if (fileStatuss[i].isDirectory() && fileStatuss[i].getModificationTime() > result)
               result = fileStatuss[i].getModificationTime();
      } else { // here levelDeep > 1
         List<Path> pathList = new ArrayList<Path>();
         if (paths.length == 1)
            fileStatuss = getFileSystem().listStatus(paths[0], filter); // minor optimization. avoid using list based API when not needed
         else
            fileStatuss = getFileSystem().listStatus(paths, filter);
         for (int i = 0; i < fileStatuss.length; i++)
            if (fileStatuss[i].isDirectory()) {
               pathList.add(fileStatuss[i].getPath());
               // make sure level n-1 is accounted for, for the delete partition case
               if (fileStatuss[i].getModificationTime() > result)
                  result = fileStatuss[i].getModificationTime();
            }
         long l = getHiveTableMaxModificationTs2(pathList.toArray(new Path[pathList.size()]),
                                                 levelDeep - 1);
         if (l > result)
            result = l;
      }
      return result;
   }

   /** @return the configured fs.defaultFS URI string */
   public static String getFsDefaultName()
   {
      String uri = config_.get("fs.defaultFS");
      return uri;
   }

   public static boolean hdfsCreateDirectory(String pathStr) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsCreateDirectory()" + pathStr);
      Path dirPath = new Path(pathStr );
      FileSystem fs = FileSystem.get(dirPath.toUri(), config_);
      fs.mkdirs(dirPath);
      return true;
   }

   public static boolean hdfsRename(String fromPathStr, String toPathStr) throws IOException
   {
      if (logger_.isDebugEnabled())
         logger_.debug("HDFSClient.hdfsRename(" + fromPathStr + ", " + toPathStr + ")");
      Path fromPath = new Path(fromPathStr );
      Path toPath = new Path(toPathStr );
      FileSystem fs = FileSystem.get(fromPath.toUri(), config_);
      fs.rename(fromPath, toPath);
      return true;
   }

   // Implemented in the native layer; pushes one entry's status to hdfsListDirectory's caller.
   private native int sendFileStatus(long jniObj, int numFiles, int fileNo,
                                     boolean isDir, String filename, long modTime, long len,
                                     short numReplicas, long blockSize, String owner,
                                     String group, short permissions, long accessTime);
}
googleapis/google-cloud-java
35,045
java-filestore/proto-google-cloud-filestore-v1/src/main/java/com/google/cloud/filestore/v1/UpdateInstanceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/filestore/v1/cloud_filestore_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.filestore.v1; /** * * * <pre> * UpdateInstanceRequest updates the settings of an instance. * </pre> * * Protobuf type {@code google.cloud.filestore.v1.UpdateInstanceRequest} */ public final class UpdateInstanceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.filestore.v1.UpdateInstanceRequest) UpdateInstanceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateInstanceRequest.newBuilder() to construct. 
private UpdateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateInstanceRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateInstanceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_UpdateInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_UpdateInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.filestore.v1.UpdateInstanceRequest.class, com.google.cloud.filestore.v1.UpdateInstanceRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. 
The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int INSTANCE_FIELD_NUMBER = 2; private com.google.cloud.filestore.v1.Instance instance_; /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> * * @return Whether the instance field is set. */ @java.lang.Override public boolean hasInstance() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> * * @return The instance. */ @java.lang.Override public com.google.cloud.filestore.v1.Instance getInstance() { return instance_ == null ? com.google.cloud.filestore.v1.Instance.getDefaultInstance() : instance_; } /** * * * <pre> * Only fields specified in update_mask are updated. 
* </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ @java.lang.Override public com.google.cloud.filestore.v1.InstanceOrBuilder getInstanceOrBuilder() { return instance_ == null ? com.google.cloud.filestore.v1.Instance.getDefaultInstance() : instance_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getInstance()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.filestore.v1.UpdateInstanceRequest)) { return super.equals(obj); } com.google.cloud.filestore.v1.UpdateInstanceRequest other = (com.google.cloud.filestore.v1.UpdateInstanceRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasInstance() != other.hasInstance()) return false; if (hasInstance()) { if (!getInstance().equals(other.getInstance())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) 
return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest 
parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.filestore.v1.UpdateInstanceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * UpdateInstanceRequest updates the settings of an instance. * </pre> * * Protobuf type {@code google.cloud.filestore.v1.UpdateInstanceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.filestore.v1.UpdateInstanceRequest) com.google.cloud.filestore.v1.UpdateInstanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_UpdateInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_UpdateInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.filestore.v1.UpdateInstanceRequest.class, com.google.cloud.filestore.v1.UpdateInstanceRequest.Builder.class); } // Construct using com.google.cloud.filestore.v1.UpdateInstanceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getInstanceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } instance_ = null; if (instanceBuilder_ != null) 
{ instanceBuilder_.dispose(); instanceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_UpdateInstanceRequest_descriptor; } @java.lang.Override public com.google.cloud.filestore.v1.UpdateInstanceRequest getDefaultInstanceForType() { return com.google.cloud.filestore.v1.UpdateInstanceRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.filestore.v1.UpdateInstanceRequest build() { com.google.cloud.filestore.v1.UpdateInstanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.filestore.v1.UpdateInstanceRequest buildPartial() { com.google.cloud.filestore.v1.UpdateInstanceRequest result = new com.google.cloud.filestore.v1.UpdateInstanceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.filestore.v1.UpdateInstanceRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.instance_ = instanceBuilder_ == null ? 
instance_ : instanceBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.filestore.v1.UpdateInstanceRequest) { return mergeFrom((com.google.cloud.filestore.v1.UpdateInstanceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.filestore.v1.UpdateInstanceRequest other) { if (other == com.google.cloud.filestore.v1.UpdateInstanceRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasInstance()) { mergeInstance(other.getInstance()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> * * @return The updateMask. 
*/ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. 
The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. 
The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Mask of fields to update. At least one path must be supplied in this * field. The elements of the repeated paths field may only include these * fields: * * * "description" * * "file_shares" * * "labels" * * "performance_config" * * "deletion_protection_enabled" * * "deletion_protection_reason" * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.filestore.v1.Instance instance_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.filestore.v1.Instance, com.google.cloud.filestore.v1.Instance.Builder, com.google.cloud.filestore.v1.InstanceOrBuilder> instanceBuilder_; /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> * * @return Whether the instance field is set. 
*/ public boolean hasInstance() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> * * @return The instance. */ public com.google.cloud.filestore.v1.Instance getInstance() { if (instanceBuilder_ == null) { return instance_ == null ? com.google.cloud.filestore.v1.Instance.getDefaultInstance() : instance_; } else { return instanceBuilder_.getMessage(); } } /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ public Builder setInstance(com.google.cloud.filestore.v1.Instance value) { if (instanceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } instance_ = value; } else { instanceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ public Builder setInstance(com.google.cloud.filestore.v1.Instance.Builder builderForValue) { if (instanceBuilder_ == null) { instance_ = builderForValue.build(); } else { instanceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Only fields specified in update_mask are updated. 
* </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ public Builder mergeInstance(com.google.cloud.filestore.v1.Instance value) { if (instanceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && instance_ != null && instance_ != com.google.cloud.filestore.v1.Instance.getDefaultInstance()) { getInstanceBuilder().mergeFrom(value); } else { instance_ = value; } } else { instanceBuilder_.mergeFrom(value); } if (instance_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ public Builder clearInstance() { bitField0_ = (bitField0_ & ~0x00000002); instance_ = null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ public com.google.cloud.filestore.v1.Instance.Builder getInstanceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getInstanceFieldBuilder().getBuilder(); } /** * * * <pre> * Only fields specified in update_mask are updated. * </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ public com.google.cloud.filestore.v1.InstanceOrBuilder getInstanceOrBuilder() { if (instanceBuilder_ != null) { return instanceBuilder_.getMessageOrBuilder(); } else { return instance_ == null ? com.google.cloud.filestore.v1.Instance.getDefaultInstance() : instance_; } } /** * * * <pre> * Only fields specified in update_mask are updated. 
* </pre> * * <code>.google.cloud.filestore.v1.Instance instance = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.filestore.v1.Instance, com.google.cloud.filestore.v1.Instance.Builder, com.google.cloud.filestore.v1.InstanceOrBuilder> getInstanceFieldBuilder() { if (instanceBuilder_ == null) { instanceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.filestore.v1.Instance, com.google.cloud.filestore.v1.Instance.Builder, com.google.cloud.filestore.v1.InstanceOrBuilder>( getInstance(), getParentForChildren(), isClean()); instance_ = null; } return instanceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.filestore.v1.UpdateInstanceRequest) } // @@protoc_insertion_point(class_scope:google.cloud.filestore.v1.UpdateInstanceRequest) private static final com.google.cloud.filestore.v1.UpdateInstanceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.filestore.v1.UpdateInstanceRequest(); } public static com.google.cloud.filestore.v1.UpdateInstanceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateInstanceRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateInstanceRequest>() { @java.lang.Override public UpdateInstanceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateInstanceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateInstanceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.filestore.v1.UpdateInstanceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,258
java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/ListGenerativeQuestionConfigsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/retail/v2/generative_question_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.retail.v2; /** * * * <pre> * Response for ListQuestions method. * </pre> * * Protobuf type {@code google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse} */ public final class ListGenerativeQuestionConfigsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse) ListGenerativeQuestionConfigsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListGenerativeQuestionConfigsResponse.newBuilder() to construct. 
private ListGenerativeQuestionConfigsResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListGenerativeQuestionConfigsResponse() { generativeQuestionConfigs_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListGenerativeQuestionConfigsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2.GenerativeQuestionServiceProto .internal_static_google_cloud_retail_v2_ListGenerativeQuestionConfigsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2.GenerativeQuestionServiceProto .internal_static_google_cloud_retail_v2_ListGenerativeQuestionConfigsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse.class, com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse.Builder.class); } public static final int GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig> generativeQuestionConfigs_; /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig> getGenerativeQuestionConfigsList() { return generativeQuestionConfigs_; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder> getGenerativeQuestionConfigsOrBuilderList() { return generativeQuestionConfigs_; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ @java.lang.Override public int getGenerativeQuestionConfigsCount() { return generativeQuestionConfigs_.size(); } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ @java.lang.Override public com.google.cloud.retail.v2.GenerativeQuestionConfig getGenerativeQuestionConfigs( int index) { return generativeQuestionConfigs_.get(index); } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ @java.lang.Override public com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder getGenerativeQuestionConfigsOrBuilder(int index) { return generativeQuestionConfigs_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < generativeQuestionConfigs_.size(); i++) { output.writeMessage(1, generativeQuestionConfigs_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < generativeQuestionConfigs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, generativeQuestionConfigs_.get(i)); } size 
+= getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse)) { return super.equals(obj); } com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse other = (com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse) obj; if (!getGenerativeQuestionConfigsList().equals(other.getGenerativeQuestionConfigsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getGenerativeQuestionConfigsCount() > 0) { hash = (37 * hash) + GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER; hash = (53 * hash) + getGenerativeQuestionConfigsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for ListQuestions method. 
* </pre> * * Protobuf type {@code google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse) com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2.GenerativeQuestionServiceProto .internal_static_google_cloud_retail_v2_ListGenerativeQuestionConfigsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2.GenerativeQuestionServiceProto .internal_static_google_cloud_retail_v2_ListGenerativeQuestionConfigsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse.class, com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse.Builder.class); } // Construct using com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (generativeQuestionConfigsBuilder_ == null) { generativeQuestionConfigs_ = java.util.Collections.emptyList(); } else { generativeQuestionConfigs_ = null; generativeQuestionConfigsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.retail.v2.GenerativeQuestionServiceProto .internal_static_google_cloud_retail_v2_ListGenerativeQuestionConfigsResponse_descriptor; } @java.lang.Override public com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse 
getDefaultInstanceForType() { return com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse build() { com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse buildPartial() { com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse result = new com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse result) { if (generativeQuestionConfigsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { generativeQuestionConfigs_ = java.util.Collections.unmodifiableList(generativeQuestionConfigs_); bitField0_ = (bitField0_ & ~0x00000001); } result.generativeQuestionConfigs_ = generativeQuestionConfigs_; } else { result.generativeQuestionConfigs_ = generativeQuestionConfigsBuilder_.build(); } } private void buildPartial0( com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse) { return mergeFrom((com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse other) { if (other == com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse.getDefaultInstance()) return this; if (generativeQuestionConfigsBuilder_ == null) { if (!other.generativeQuestionConfigs_.isEmpty()) { if (generativeQuestionConfigs_.isEmpty()) { generativeQuestionConfigs_ = other.generativeQuestionConfigs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.addAll(other.generativeQuestionConfigs_); } onChanged(); } } else { if (!other.generativeQuestionConfigs_.isEmpty()) { if (generativeQuestionConfigsBuilder_.isEmpty()) { generativeQuestionConfigsBuilder_.dispose(); generativeQuestionConfigsBuilder_ = null; generativeQuestionConfigs_ = other.generativeQuestionConfigs_; bitField0_ = (bitField0_ & ~0x00000001); generativeQuestionConfigsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getGenerativeQuestionConfigsFieldBuilder() : null; } else { generativeQuestionConfigsBuilder_.addAllMessages(other.generativeQuestionConfigs_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.retail.v2.GenerativeQuestionConfig m = input.readMessage( com.google.cloud.retail.v2.GenerativeQuestionConfig.parser(), extensionRegistry); if (generativeQuestionConfigsBuilder_ == null) { ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.add(m); } else { generativeQuestionConfigsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig> generativeQuestionConfigs_ = java.util.Collections.emptyList(); private void ensureGenerativeQuestionConfigsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { generativeQuestionConfigs_ = new java.util.ArrayList<com.google.cloud.retail.v2.GenerativeQuestionConfig>( generativeQuestionConfigs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2.GenerativeQuestionConfig, com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder, 
com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder> generativeQuestionConfigsBuilder_; /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig> getGenerativeQuestionConfigsList() { if (generativeQuestionConfigsBuilder_ == null) { return java.util.Collections.unmodifiableList(generativeQuestionConfigs_); } else { return generativeQuestionConfigsBuilder_.getMessageList(); } } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public int getGenerativeQuestionConfigsCount() { if (generativeQuestionConfigsBuilder_ == null) { return generativeQuestionConfigs_.size(); } else { return generativeQuestionConfigsBuilder_.getCount(); } } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public com.google.cloud.retail.v2.GenerativeQuestionConfig getGenerativeQuestionConfigs( int index) { if (generativeQuestionConfigsBuilder_ == null) { return generativeQuestionConfigs_.get(index); } else { return generativeQuestionConfigsBuilder_.getMessage(index); } } /** * * * <pre> * All the questions for a given catalog. 
* </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder setGenerativeQuestionConfigs( int index, com.google.cloud.retail.v2.GenerativeQuestionConfig value) { if (generativeQuestionConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.set(index, value); onChanged(); } else { generativeQuestionConfigsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder setGenerativeQuestionConfigs( int index, com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder builderForValue) { if (generativeQuestionConfigsBuilder_ == null) { ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.set(index, builderForValue.build()); onChanged(); } else { generativeQuestionConfigsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder addGenerativeQuestionConfigs( com.google.cloud.retail.v2.GenerativeQuestionConfig value) { if (generativeQuestionConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.add(value); onChanged(); } else { generativeQuestionConfigsBuilder_.addMessage(value); } return this; } /** * * * <pre> * All the questions for a given catalog. 
* </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder addGenerativeQuestionConfigs( int index, com.google.cloud.retail.v2.GenerativeQuestionConfig value) { if (generativeQuestionConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.add(index, value); onChanged(); } else { generativeQuestionConfigsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder addGenerativeQuestionConfigs( com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder builderForValue) { if (generativeQuestionConfigsBuilder_ == null) { ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.add(builderForValue.build()); onChanged(); } else { generativeQuestionConfigsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder addGenerativeQuestionConfigs( int index, com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder builderForValue) { if (generativeQuestionConfigsBuilder_ == null) { ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.add(index, builderForValue.build()); onChanged(); } else { generativeQuestionConfigsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder addAllGenerativeQuestionConfigs( java.lang.Iterable<? 
extends com.google.cloud.retail.v2.GenerativeQuestionConfig> values) { if (generativeQuestionConfigsBuilder_ == null) { ensureGenerativeQuestionConfigsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, generativeQuestionConfigs_); onChanged(); } else { generativeQuestionConfigsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder clearGenerativeQuestionConfigs() { if (generativeQuestionConfigsBuilder_ == null) { generativeQuestionConfigs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { generativeQuestionConfigsBuilder_.clear(); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public Builder removeGenerativeQuestionConfigs(int index) { if (generativeQuestionConfigsBuilder_ == null) { ensureGenerativeQuestionConfigsIsMutable(); generativeQuestionConfigs_.remove(index); onChanged(); } else { generativeQuestionConfigsBuilder_.remove(index); } return this; } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder getGenerativeQuestionConfigsBuilder(int index) { return getGenerativeQuestionConfigsFieldBuilder().getBuilder(index); } /** * * * <pre> * All the questions for a given catalog. 
* </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder getGenerativeQuestionConfigsOrBuilder(int index) { if (generativeQuestionConfigsBuilder_ == null) { return generativeQuestionConfigs_.get(index); } else { return generativeQuestionConfigsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public java.util.List<? extends com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder> getGenerativeQuestionConfigsOrBuilderList() { if (generativeQuestionConfigsBuilder_ != null) { return generativeQuestionConfigsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(generativeQuestionConfigs_); } } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder addGenerativeQuestionConfigsBuilder() { return getGenerativeQuestionConfigsFieldBuilder() .addBuilder(com.google.cloud.retail.v2.GenerativeQuestionConfig.getDefaultInstance()); } /** * * * <pre> * All the questions for a given catalog. * </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder addGenerativeQuestionConfigsBuilder(int index) { return getGenerativeQuestionConfigsFieldBuilder() .addBuilder( index, com.google.cloud.retail.v2.GenerativeQuestionConfig.getDefaultInstance()); } /** * * * <pre> * All the questions for a given catalog. 
* </pre> * * <code> * repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1; * </code> */ public java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder> getGenerativeQuestionConfigsBuilderList() { return getGenerativeQuestionConfigsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2.GenerativeQuestionConfig, com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder, com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder> getGenerativeQuestionConfigsFieldBuilder() { if (generativeQuestionConfigsBuilder_ == null) { generativeQuestionConfigsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2.GenerativeQuestionConfig, com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder, com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder>( generativeQuestionConfigs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); generativeQuestionConfigs_ = null; } return generativeQuestionConfigsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse) private static final com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse(); } public static com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> PARSER = new com.google.protobuf.AbstractParser<ListGenerativeQuestionConfigsResponse>() { @java.lang.Override public ListGenerativeQuestionConfigsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.retail.v2.ListGenerativeQuestionConfigsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/cxf
35,374
rt/databinding/jaxb/src/main/java/org/apache/cxf/jaxb/JAXBDataBinding.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.jaxb; import java.io.IOException; import java.io.OutputStream; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Consumer; import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.namespace.QName; import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLEventWriter; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.dom.DOMSource; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.xml.sax.InputSource; import jakarta.xml.bind.JAXBContext; import jakarta.xml.bind.JAXBElement; import jakarta.xml.bind.JAXBException; import jakarta.xml.bind.Marshaller; import jakarta.xml.bind.Unmarshaller; import 
jakarta.xml.bind.ValidationEventHandler; import jakarta.xml.bind.annotation.XmlElement; import jakarta.xml.bind.annotation.XmlElementRef; import jakarta.xml.bind.annotation.adapters.XmlAdapter; import jakarta.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import org.apache.cxf.Bus; import org.apache.cxf.common.injection.NoJSR250Annotations; import org.apache.cxf.common.jaxb.JAXBBeanInfo; import org.apache.cxf.common.jaxb.JAXBContextCache; import org.apache.cxf.common.jaxb.JAXBContextCache.CachedContextAndSchemas; import org.apache.cxf.common.jaxb.JAXBContextProxy; import org.apache.cxf.common.jaxb.JAXBUtils; import org.apache.cxf.common.logging.LogUtils; import org.apache.cxf.common.util.PackageUtils; import org.apache.cxf.common.util.PropertyUtils; import org.apache.cxf.common.util.ReflectionUtil; import org.apache.cxf.common.xmlschema.SchemaCollection; import org.apache.cxf.databinding.AbstractInterceptorProvidingDataBinding; import org.apache.cxf.databinding.AbstractWrapperHelper; import org.apache.cxf.databinding.DataReader; import org.apache.cxf.databinding.DataWriter; import org.apache.cxf.databinding.WrapperCapableDatabinding; import org.apache.cxf.databinding.WrapperHelper; import org.apache.cxf.interceptor.InterceptorProvider; import org.apache.cxf.jaxb.attachment.JAXBAttachmentSchemaValidationHack; import org.apache.cxf.jaxb.io.DataReaderImpl; import org.apache.cxf.jaxb.io.DataWriterImpl; import org.apache.cxf.resource.URIResolver; import org.apache.cxf.service.Service; import org.apache.cxf.service.factory.ServiceConstructionException; import org.apache.cxf.service.model.MessageInfo; import org.apache.cxf.service.model.MessagePartInfo; import org.apache.cxf.service.model.ServiceInfo; import org.apache.cxf.staxutils.StaxUtils; import org.apache.cxf.ws.addressing.ObjectFactory; @NoJSR250Annotations public class JAXBDataBinding extends AbstractInterceptorProvidingDataBinding implements WrapperCapableDatabinding, InterceptorProvider { public static final 
String READER_VALIDATION_EVENT_HANDLER = "jaxb-reader-validation-event-handler"; public static final String VALIDATION_EVENT_HANDLER = "jaxb-validation-event-handler"; public static final String SET_VALIDATION_EVENT_HANDLER = "set-jaxb-validation-event-handler"; public static final String WRITER_VALIDATION_EVENT_HANDLER = "jaxb-writer-validation-event-handler"; public static final String SCHEMA_RESOURCE = "SCHEMRESOURCE"; public static final String MTOM_THRESHOLD = "org.apache.cxf.jaxb.mtomThreshold"; public static final String UNWRAP_JAXB_ELEMENT = "unwrap.jaxb.element"; public static final String USE_JAXB_BRIDGE = "use.jaxb.bridge"; public static final String JAXB_SCAN_PACKAGES = "jaxb.scanPackages"; private static final Logger LOG = LogUtils.getLogger(JAXBDataBinding.class); private static final Class<?>[] SUPPORTED_READER_FORMATS = new Class<?>[] {Node.class, XMLEventReader.class, XMLStreamReader.class}; private static final Class<?>[] SUPPORTED_WRITER_FORMATS = new Class<?>[] {OutputStream.class, Node.class, XMLEventWriter.class, XMLStreamWriter.class}; private static class DelayedDOMResult extends DOMResult { private final URL resource; private final String publicId; DelayedDOMResult(URL url, String sysId, String pId) { super(null, sysId); resource = url; publicId = pId; } public synchronized Node getNode() { Node nd = super.getNode(); if (nd == null) { try { InputSource src = new InputSource(resource.openStream()); src.setSystemId(this.getSystemId()); src.setPublicId(publicId); Document doc = StaxUtils.read(src); setNode(doc); nd = super.getNode(); } catch (Exception ex) { throw new RuntimeException(ex); } } return nd; } } private static final Map<String, DOMResult> BUILT_IN_SCHEMAS = new HashMap<>(); static { try (URIResolver resolver = new URIResolver()) { try { resolver.resolve("", "classpath:/schemas/wsdl/ws-addr-wsdl.xsd", JAXBDataBinding.class); if (resolver.isResolved()) { resolver.getInputStream().close(); DOMResult dr = new 
DelayedDOMResult(resolver.getURL(), "classpath:/schemas/wsdl/ws-addr-wsdl.xsd", "http://www.w3.org/2005/02/addressing/wsdl"); BUILT_IN_SCHEMAS.put("http://www.w3.org/2005/02/addressing/wsdl", dr); resolver.unresolve(); } } catch (Exception e) { //IGNORE } try { resolver.resolve("", "classpath:/schemas/wsdl/ws-addr.xsd", JAXBDataBinding.class); if (resolver.isResolved()) { resolver.getInputStream().close(); DOMResult dr = new DelayedDOMResult(resolver.getURL(), "classpath:/schemas/wsdl/ws-addr.xsd", "http://www.w3.org/2005/08/addressing"); BUILT_IN_SCHEMAS.put("http://www.w3.org/2005/08/addressing", dr); resolver.unresolve(); } } catch (Exception e) { //IGNORE } try { resolver.resolve("", "classpath:/schemas/wsdl/wsrm.xsd", JAXBDataBinding.class); if (resolver.isResolved()) { resolver.getInputStream().close(); DOMResult dr = new DelayedDOMResult(resolver.getURL(), "classpath:/schemas/wsdl/wsrm.xsd", "http://schemas.xmlsoap.org/ws/2005/02/rm"); BUILT_IN_SCHEMAS.put("http://schemas.xmlsoap.org/ws/2005/02/rm", dr); resolver.unresolve(); } } catch (Exception e) { //IGNORE } } catch (Exception e) { //IGNORE } } Class<?>[] extraClass; JAXBContext context; Set<Class<?>> contextClasses; Collection<Object> typeRefs = new ArrayList<>(); Class<?> cls; private Map<String, Object> contextProperties = new HashMap<>(); private List<XmlAdapter<?, ?>> adapters = new ArrayList<>(); private Map<String, Object> marshallerProperties = new HashMap<>(); private Map<String, Object> unmarshallerProperties = new HashMap<>(); private Unmarshaller.Listener unmarshallerListener; private Marshaller.Listener marshallerListener; private ValidationEventHandler validationEventHandler; private Object escapeHandler; private Object noEscapeHandler; private boolean unwrapJAXBElement = true; private boolean scanPackages = true; private boolean qualifiedSchemas; public JAXBDataBinding() { } public JAXBDataBinding(boolean q) { this.qualifiedSchemas = q; } public JAXBDataBinding(Class<?>... 
classes) throws JAXBException { contextClasses = new LinkedHashSet<>(Arrays.asList(classes)); setContext(createJAXBContext(contextClasses)); } public JAXBDataBinding(boolean qualified, Map<String, Object> props) throws JAXBException { this(qualified); if (props != null && props.get("jaxb.additionalContextClasses") != null) { Object o = props.get("jaxb.additionalContextClasses"); if (o instanceof Class) { o = new Class[] {(Class<?>)o}; } extraClass = (Class[])o; } // the default for scan packages is true, so the jaxb scan packages // property must be explicitly set to false to disable it if (PropertyUtils.isFalse(props, JAXB_SCAN_PACKAGES)) { scanPackages = false; } } public JAXBDataBinding(JAXBContext context) { this(); setContext(context); } protected boolean getQualifiedSchemas() { return qualifiedSchemas; } public JAXBContext getContext() { return context; } public final void setContext(JAXBContext ctx) { context = ctx; //create default MininumEscapeHandler escapeHandler = JAXBUtils.createMininumEscapeHandler(ctx.getClass()); noEscapeHandler = JAXBUtils.createNoEscapeHandler(ctx.getClass()); } public Object getEscapeHandler() { return escapeHandler; } public void setEscapeHandler(Object handler) { escapeHandler = handler; } public void applyEscapeHandler(boolean escape, Consumer<Object> consumer) { if (escape) { consumer.accept(escapeHandler); } else if (noEscapeHandler != null) { consumer.accept(noEscapeHandler); } } @SuppressWarnings("unchecked") public <T> DataWriter<T> createWriter(Class<T> c) { Integer mtomThresholdInt = Integer.valueOf(getMtomThreshold()); if (c == XMLStreamWriter.class) { DataWriterImpl<XMLStreamWriter> r = new DataWriterImpl<>(getBus(), this, true); r.setMtomThreshold(mtomThresholdInt); return (DataWriter<T>)r; } else if (c == OutputStream.class) { DataWriterImpl<OutputStream> r = new DataWriterImpl<>(getBus(), this, false); r.setMtomThreshold(mtomThresholdInt); return (DataWriter<T>)r; } else if (c == XMLEventWriter.class) { 
DataWriterImpl<XMLEventWriter> r = new DataWriterImpl<>(getBus(), this, true); r.setMtomThreshold(mtomThresholdInt); return (DataWriter<T>)r; } else if (c == Node.class) { DataWriterImpl<Node> r = new DataWriterImpl<>(getBus(), this, false); r.setMtomThreshold(mtomThresholdInt); return (DataWriter<T>)r; } return null; } public Class<?>[] getSupportedWriterFormats() { return SUPPORTED_WRITER_FORMATS; } @SuppressWarnings("unchecked") public <T> DataReader<T> createReader(Class<T> c) { DataReader<T> dr = null; if (c == XMLStreamReader.class) { dr = (DataReader<T>)new DataReaderImpl<XMLStreamReader>(this, unwrapJAXBElement); } else if (c == XMLEventReader.class) { dr = (DataReader<T>)new DataReaderImpl<XMLEventReader>(this, unwrapJAXBElement); } else if (c == Node.class) { dr = (DataReader<T>)new DataReaderImpl<Node>(this, unwrapJAXBElement); } return dr; } public Class<?>[] getSupportedReaderFormats() { return SUPPORTED_READER_FORMATS; } @SuppressWarnings("unchecked") public synchronized void initialize(Service service) { inInterceptors.addIfAbsent(JAXBAttachmentSchemaValidationHack.INSTANCE); inFaultInterceptors.addIfAbsent(JAXBAttachmentSchemaValidationHack.INSTANCE); // context is already set, don't redo it if (context != null) { return; } contextClasses = new LinkedHashSet<>(); for (ServiceInfo serviceInfo : service.getServiceInfos()) { JAXBContextInitializer initializer = new JAXBContextInitializer(getBus(), serviceInfo, contextClasses, typeRefs, this.getUnmarshallerProperties()); initializer.walk(); if (serviceInfo.getProperty("extra.class") != null) { Set<Class<?>> exClasses = serviceInfo.getProperty("extra.class", Set.class); contextClasses.addAll(exClasses); } } String tns = getNamespaceToUse(service); final CachedContextAndSchemas cachedContextAndSchemas; try { cachedContextAndSchemas = createJAXBContextAndSchemas(contextClasses, tns); } catch (JAXBException e1) { throw new ServiceConstructionException(e1); } final JAXBContext ctx = 
cachedContextAndSchemas.getContext(); if (LOG.isLoggable(Level.FINE)) { LOG.log(Level.FINE, "CREATED_JAXB_CONTEXT", new Object[] {ctx, contextClasses}); } setContext(ctx); for (ServiceInfo serviceInfo : service.getServiceInfos()) { SchemaCollection col = serviceInfo.getXmlSchemaCollection(); if (col.getXmlSchemas().length > 1) { // someone has already filled in the types justCheckForJAXBAnnotations(serviceInfo); continue; } boolean schemasFromCache = false; Collection<DOMSource> schemas = getSchemas(); if (schemas == null || schemas.isEmpty()) { schemas = cachedContextAndSchemas.getSchemas(); if (schemas != null) { schemasFromCache = true; } } else { schemasFromCache = true; } Set<DOMSource> bi = new LinkedHashSet<>(); if (schemas == null) { schemas = new LinkedHashSet<>(); try { for (DOMResult r : generateJaxbSchemas()) { DOMSource src = new DOMSource(r.getNode(), r.getSystemId()); if (isInBuiltInSchemas(r)) { bi.add(src); } else { schemas.add(src); } } //put any builtins at the end. Anything that DOES import them //will cause it to load automatically and we'll skip them later schemas.addAll(bi); } catch (IOException e) { throw new ServiceConstructionException("SCHEMA_GEN_EXC", LOG, e); } } for (DOMSource r : schemas) { if (bi.contains(r)) { String ns = ((Document)r.getNode()).getDocumentElement().getAttribute("targetNamespace"); if (serviceInfo.getSchema(ns) != null) { continue; } } //StaxUtils.print(r.getNode()); //System.out.println(); addSchemaDocument(serviceInfo, col, (Document)r.getNode(), r.getSystemId()); } JAXBSchemaInitializer schemaInit = new JAXBSchemaInitializer(serviceInfo, col, context, this.qualifiedSchemas, tns); schemaInit.walk(); if (cachedContextAndSchemas != null && !schemasFromCache) { cachedContextAndSchemas.setSchemas(schemas); } } } protected void justCheckForJAXBAnnotations(ServiceInfo serviceInfo) { for (MessageInfo mi: serviceInfo.getMessages().values()) { for (MessagePartInfo mpi : mi.getMessageParts()) { checkForJAXBAnnotations(mpi, 
serviceInfo.getXmlSchemaCollection(), serviceInfo.getTargetNamespace()); } } } private void checkForJAXBAnnotations(MessagePartInfo mpi, SchemaCollection schemaCollection, String ns) { Annotation[] anns = (Annotation[])mpi.getProperty("parameter.annotations"); JAXBContextProxy ctx = JAXBUtils.createJAXBContextProxy(context, schemaCollection, ns); XmlJavaTypeAdapter jta = JAXBSchemaInitializer.findFromTypeAdapter(ctx, mpi.getTypeClass(), anns); if (jta != null) { JAXBBeanInfo jtaBeanInfo = JAXBSchemaInitializer.findFromTypeAdapter(ctx, jta.value()); JAXBBeanInfo beanInfo = JAXBSchemaInitializer.getBeanInfo(ctx, mpi.getTypeClass()); if (jtaBeanInfo != beanInfo) { mpi.setProperty("parameter.annotations", anns); mpi.setProperty("honor.jaxb.annotations", Boolean.TRUE); } } } protected String getNamespaceToUse(Service service) { if ("true".equals(service.get("org.apache.cxf.databinding.namespace"))) { return null; } final String tns; if (!service.getServiceInfos().isEmpty()) { tns = service.getServiceInfos().get(0).getInterface().getName().getNamespaceURI(); } else { tns = service.getName().getNamespaceURI(); } return tns; } public void setExtraClass(Class<?>[] userExtraClass) { extraClass = userExtraClass; } public Class<?>[] getExtraClass() { return extraClass; } // default access for tests. 
List<DOMResult> generateJaxbSchemas() throws IOException { return JAXBUtils.generateJaxbSchemas(context, BUILT_IN_SCHEMAS); } public JAXBContext createJAXBContext(Set<Class<?>> classes) throws JAXBException { return createJAXBContext(classes, null); } public JAXBContext createJAXBContext(Set<Class<?>> classes, String defaultNs) throws JAXBException { return createJAXBContextAndSchemas(classes, defaultNs).getContext(); } public CachedContextAndSchemas createJAXBContextAndSchemas(Set<Class<?>> classes, String defaultNs) throws JAXBException { //add user extra class into jaxb context if (extraClass != null && extraClass.length > 0) { for (Class<?> clz : extraClass) { classes.add(clz); } } if (scanPackages) { JAXBContextCache.scanPackages(classes); } addWsAddressingTypes(classes); return JAXBContextCache.getCachedContextAndSchemas(classes, defaultNs, contextProperties, typeRefs, true); } private void addWsAddressingTypes(Set<Class<?>> classes) { if (classes.contains(ObjectFactory.class)) { // ws-addressing is used, lets add the specific types try { classes.add(Class.forName("org.apache.cxf.ws.addressing.wsdl.ObjectFactory")); classes.add(Class.forName("org.apache.cxf.ws.addressing.wsdl.AttributedQNameType")); classes.add(Class.forName("org.apache.cxf.ws.addressing.wsdl.ServiceNameType")); } catch (ClassNotFoundException unused) { // REVISIT - ignorable if WS-ADDRESSING not available? // maybe add a way to allow interceptors to add stuff to the // context? } } } public Set<Class<?>> getContextClasses() { return Collections.unmodifiableSet(this.contextClasses); } /** * Return a map of properties. These properties are passed to * JAXBContext.newInstance when this object creates a context. * * @return the map of JAXB context properties. */ public Map<String, Object> getContextProperties() { return contextProperties; } /** * Set a map of JAXB context properties. These properties are passed to * JAXBContext.newInstance when this object creates a context. 
Note that if * you create a JAXB context elsewhere, you will not respect these * properties unless you handle it manually. * * @param contextProperties map of properties. */ public void setContextProperties(Map<String, Object> contextProperties) { this.contextProperties = contextProperties; } public List<XmlAdapter<?, ?>> getConfiguredXmlAdapters() { return adapters; } public void setConfiguredXmlAdapters(List<XmlAdapter<?, ?>> adpters) { this.adapters = adpters; } /** * Return a map of properties. These properties are set into the JAXB * Marshaller (via Marshaller.setProperty(...) when the marshaller is * created. * * @return the map of JAXB marshaller properties. */ public Map<String, Object> getMarshallerProperties() { return marshallerProperties; } /** * Set a map of JAXB marshaller properties. These properties are set into * the JAXB Marshaller (via Marshaller.setProperty(...) when the marshaller * is created. * * @param marshallerProperties map of properties. */ public void setMarshallerProperties(Map<String, Object> marshallerProperties) { this.marshallerProperties = marshallerProperties; } /** * Return a map of properties. These properties are set into the JAXB * Unmarshaller (via Unmarshaller.setProperty(...) when the unmarshaller is * created. * * @return the map of JAXB unmarshaller properties. */ public Map<String, Object> getUnmarshallerProperties() { return unmarshallerProperties; } /** * Set a map of JAXB unmarshaller properties. These properties are set into * the JAXB Unmarshaller (via Unmarshaller.setProperty(...) when the unmarshaller * is created. * * @param unmarshallerProperties map of properties. 
*/ public void setUnmarshallerProperties(Map<String, Object> unmarshallerProperties) { this.unmarshallerProperties = unmarshallerProperties; } /** * Returns the Unmarshaller.Listener that will be registered on the Unmarshallers * @return */ public Unmarshaller.Listener getUnmarshallerListener() { return unmarshallerListener; } /** * Sets the Unmarshaller.Listener that will be registered on the Unmarshallers * @param unmarshallerListener */ public void setUnmarshallerListener(Unmarshaller.Listener unmarshallerListener) { this.unmarshallerListener = unmarshallerListener; } /** * Returns the Marshaller.Listener that will be registered on the Marshallers * @return */ public Marshaller.Listener getMarshallerListener() { return marshallerListener; } /** * Sets the Marshaller.Listener that will be registered on the Marshallers * @param marshallerListener */ public void setMarshallerListener(Marshaller.Listener marshallerListener) { this.marshallerListener = marshallerListener; } public ValidationEventHandler getValidationEventHandler() { return validationEventHandler; } public void setValidationEventHandler(ValidationEventHandler validationEventHandler) { this.validationEventHandler = validationEventHandler; } public boolean isUnwrapJAXBElement() { return unwrapJAXBElement; } public void setUnwrapJAXBElement(boolean unwrapJAXBElement) { this.unwrapJAXBElement = unwrapJAXBElement; } public WrapperHelper createWrapperHelper(Class<?> wrapperType, QName wrapperName, List<String> partNames, List<String> elTypeNames, List<Class<?>> partClasses) { List<Method> getMethods = new ArrayList<>(partNames.size()); List<Method> setMethods = new ArrayList<>(partNames.size()); List<Method> jaxbMethods = new ArrayList<>(partNames.size()); List<Field> fields = new ArrayList<>(partNames.size()); Method[] allMethods = wrapperType.getMethods(); String packageName = PackageUtils.getPackageName(wrapperType); //if wrappertype class is generated by ASM, getPackage() always return null if 
(wrapperType.getPackage() != null) { packageName = wrapperType.getPackage().getName(); } String objectFactoryClassName = packageName + ".ObjectFactory"; Object objectFactory = null; try { objectFactory = wrapperType.getClassLoader().loadClass(objectFactoryClassName) .getDeclaredConstructor().newInstance(); } catch (Exception e) { //ignore, probably won't need it } Method[] allOFMethods; if (objectFactory != null) { allOFMethods = objectFactory.getClass().getMethods(); } else { allOFMethods = new Method[0]; } for (int x = 0; x < partNames.size(); x++) { String partName = partNames.get(x); if (partName == null) { getMethods.add(null); setMethods.add(null); fields.add(null); jaxbMethods.add(null); continue; } String elementType = elTypeNames.get(x); String getAccessor = JAXBUtils.nameToIdentifier(partName, JAXBUtils.IdentifierType.GETTER); String setAccessor = JAXBUtils.nameToIdentifier(partName, JAXBUtils.IdentifierType.SETTER); Method getMethod = null; Method setMethod = null; Class<?> valueClass = wrapperType; try { getMethod = valueClass.getMethod(getAccessor, AbstractWrapperHelper.NO_CLASSES); } catch (NoSuchMethodException ex) { //ignore for now } Field elField = getElField(partName, valueClass); if (getMethod == null && elementType != null && "boolean".equalsIgnoreCase(elementType) && (elField == null || (!Collection.class.isAssignableFrom(elField.getType()) && !elField.getType().isArray()))) { try { String newAcc = getAccessor.replaceFirst("get", "is"); getMethod = wrapperType.getMethod(newAcc, AbstractWrapperHelper.NO_CLASSES); } catch (NoSuchMethodException ex) { //ignore for now } } if (getMethod == null && "return".equals(partName)) { //RI generated code uses this try { getMethod = valueClass.getMethod("get_return", AbstractWrapperHelper.NO_CLASSES); } catch (NoSuchMethodException ex) { try { getMethod = valueClass.getMethod("is_return", new Class[0]); } catch (NoSuchMethodException ex2) { //ignore for now } } } if (getMethod == null && elField != null) { 
getAccessor = JAXBUtils.nameToIdentifier(elField.getName(), JAXBUtils.IdentifierType.GETTER); setAccessor = JAXBUtils.nameToIdentifier(elField.getName(), JAXBUtils.IdentifierType.SETTER); try { getMethod = valueClass.getMethod(getAccessor, AbstractWrapperHelper.NO_CLASSES); } catch (NoSuchMethodException ex) { //ignore for now } } String setAccessor2 = setAccessor; if ("return".equals(partName)) { //some versions of jaxb map "return" to "set_return" instead of "setReturn" setAccessor2 = "set_return"; } for (Method method : allMethods) { if (method.getParameterTypes() != null && method.getParameterTypes().length == 1 && (setAccessor.equals(method.getName()) || setAccessor2.equals(method.getName()))) { setMethod = method; break; } } getMethods.add(getMethod); setMethods.add(setMethod); if (setMethod != null && JAXBElement.class.isAssignableFrom(setMethod.getParameterTypes()[0])) { Type t = setMethod.getGenericParameterTypes()[0]; Class<?> pcls = null; if (t instanceof ParameterizedType) { t = ((ParameterizedType)t).getActualTypeArguments()[0]; } if (t instanceof Class) { pcls = (Class<?>)t; } String methodName = "create" + wrapperType.getSimpleName() + setMethod.getName().substring(3); for (Method m : allOFMethods) { if (m.getName().equals(methodName) && m.getParameterTypes().length == 1 && (pcls == null || pcls.equals(m.getParameterTypes()[0]))) { jaxbMethods.add(m); } } } else { jaxbMethods.add(null); } if (elField != null) { // JAXB Type get XmlElement Annotation XmlElement el = elField.getAnnotation(XmlElement.class); if (el != null && (partName.equals(el.name()) || "##default".equals(el.name()))) { ReflectionUtil.setAccessible(elField); fields.add(elField); } else { if (getMethod == null && setMethod == null) { if (el != null) { LOG.warning("Could not create accessor for property " + partName + " of type " + wrapperType.getName() + " as the @XmlElement " + "defines the name as " + el.name()); } else { LOG.warning("Could not create accessor for property " + 
partName + " of type " + wrapperType.getName()); } } fields.add(null); } } else { fields.add(null); } } return createWrapperHelper(getBus(), wrapperType, setMethods.toArray(new Method[0]), getMethods.toArray(new Method[0]), jaxbMethods.toArray(new Method[0]), fields.toArray(new Field[0]), objectFactory); } public static boolean isInBuiltInSchemas(DOMResult schema) { return BUILT_IN_SCHEMAS.containsValue(schema); } private static Field getElField(String partName, final Class<?> wrapperType) { String fieldName = JAXBUtils.nameToIdentifier(partName, JAXBUtils.IdentifierType.VARIABLE); Field[] fields = ReflectionUtil.getDeclaredFields(wrapperType); for (Field field : fields) { XmlElement el = field.getAnnotation(XmlElement.class); if (el != null && partName.equals(el.name())) { return field; } XmlElementRef xmlElementRefAnnotation = field.getAnnotation(XmlElementRef.class); if (xmlElementRefAnnotation != null && partName.equals(xmlElementRefAnnotation.name())) { return field; } if (field.getName().equals(fieldName)) { return field; } } return null; } private static WrapperHelper createWrapperHelper(Bus bus, Class<?> wrapperType, Method[] setMethods, Method[] getMethods, Method[] jaxbMethods, Field[] fields, Object objectFactory) { WrapperHelper wh = compileWrapperHelper(bus, wrapperType, setMethods, getMethods, jaxbMethods, fields, objectFactory); if (wh == null) { wh = new JAXBWrapperHelper(wrapperType, setMethods, getMethods, jaxbMethods, fields, objectFactory); } return wh; } private static WrapperHelper compileWrapperHelper(Bus bus, Class<?> wrapperType, Method[] setMethods, Method[] getMethods, Method[] jaxbMethods, Field[] fields, Object objectFactory) { try { WrapperHelperCreator creator = bus.getExtension(WrapperHelperCreator.class); return creator.compile(wrapperType, setMethods, getMethods, jaxbMethods, fields, objectFactory); } catch (Throwable t) { // Some error - probably a bad version of ASM or similar return null; } } }
apache/tomcat
35,020
java/org/apache/catalina/startup/Catalina.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.startup; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.Constructor; import java.net.ConnectException; import java.net.Socket; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.logging.LogManager; import org.apache.catalina.Container; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Server; import org.apache.catalina.connector.Connector; import org.apache.catalina.core.StandardContext; import org.apache.juli.ClassLoaderLogManager; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import org.apache.tomcat.util.ExceptionUtils; import org.apache.tomcat.util.digester.Digester; import org.apache.tomcat.util.digester.Rule; import org.apache.tomcat.util.digester.RuleSet; import org.apache.tomcat.util.file.ConfigFileLoader; import org.apache.tomcat.util.file.ConfigurationSource; import org.apache.tomcat.util.log.SystemLogHandler; import 
org.apache.tomcat.util.res.StringManager; import org.xml.sax.Attributes; import org.xml.sax.InputSource; /** * Startup/Shutdown shell program for Catalina. The following command line options are recognized: * <ul> * <li><b>-config {pathname}</b> - Set the pathname of the configuration file to be processed. If a relative path is * specified, it will be interpreted as relative to the directory pathname specified by the "catalina.base" system * property. [conf/server.xml]</li> * <li><b>-help</b> - Display usage information.</li> * <li><b>-nonaming</b> - Disable naming support.</li> * <li><b>configtest</b> - Try to test the config</li> * <li><b>start</b> - Start an instance of Catalina.</li> * <li><b>stop</b> - Stop the currently running instance of Catalina.</li> * </ul> */ public class Catalina { /** * The string manager for this package. */ protected static final StringManager sm = StringManager.getManager(Constants.Package); public static final String SERVER_XML = "conf/server.xml"; // ----------------------------------------------------- Instance Variables /** * Use await. */ protected boolean await = false; /** * Pathname to the server configuration file. */ protected String configFile = SERVER_XML; // XXX Should be moved to embedded /** * The shared extensions class loader for this server. */ protected ClassLoader parentClassLoader = Catalina.class.getClassLoader(); /** * The server component we are starting or stopping. */ protected Server server = null; /** * Use shutdown hook flag. */ protected boolean useShutdownHook = true; /** * Shutdown hook. */ protected Thread shutdownHook = null; /** * Is naming enabled ? */ protected boolean useNaming = true; /** * Prevent duplicate loads. */ protected boolean loaded = false; /** * Rethrow exceptions on init failure. */ protected boolean throwOnInitFailure = Boolean.getBoolean("org.apache.catalina.startup.EXIT_ON_INIT_FAILURE"); /** * Generate Tomcat embedded code from configuration files. 
*/ protected boolean generateCode = false; /** * Location of generated sources. */ protected File generatedCodeLocation = null; /** * Value of the argument. */ protected String generatedCodeLocationParameter = null; /** * Top package name for generated source. */ protected String generatedCodePackage = "catalinaembedded"; /** * Use generated code as a replacement for configuration files. */ protected boolean useGeneratedCode = false; // ----------------------------------------------------------- Constructors public Catalina() { ExceptionUtils.preload(); } // ------------------------------------------------------------- Properties public void setConfigFile(String file) { configFile = file; } public String getConfigFile() { return configFile; } public void setUseShutdownHook(boolean useShutdownHook) { this.useShutdownHook = useShutdownHook; } public boolean getUseShutdownHook() { return useShutdownHook; } public boolean getGenerateCode() { return this.generateCode; } public void setGenerateCode(boolean generateCode) { this.generateCode = generateCode; } public boolean getUseGeneratedCode() { return this.useGeneratedCode; } public void setUseGeneratedCode(boolean useGeneratedCode) { this.useGeneratedCode = useGeneratedCode; } public File getGeneratedCodeLocation() { return this.generatedCodeLocation; } public void setGeneratedCodeLocation(File generatedCodeLocation) { this.generatedCodeLocation = generatedCodeLocation; } public String getGeneratedCodePackage() { return this.generatedCodePackage; } public void setGeneratedCodePackage(String generatedCodePackage) { this.generatedCodePackage = generatedCodePackage; } /** * @return <code>true</code> if an exception should be thrown if an error occurs during server init */ public boolean getThrowOnInitFailure() { return throwOnInitFailure; } /** * Set the behavior regarding errors that could occur during server init. 
* * @param throwOnInitFailure the new flag value */ public void setThrowOnInitFailure(boolean throwOnInitFailure) { this.throwOnInitFailure = throwOnInitFailure; } /** * Set the shared extensions class loader. * * @param parentClassLoader The shared extensions class loader. */ public void setParentClassLoader(ClassLoader parentClassLoader) { this.parentClassLoader = parentClassLoader; } public ClassLoader getParentClassLoader() { if (parentClassLoader != null) { return parentClassLoader; } return ClassLoader.getSystemClassLoader(); } public void setServer(Server server) { this.server = server; } public Server getServer() { return server; } /** * @return <code>true</code> if naming is enabled. */ public boolean isUseNaming() { return this.useNaming; } /** * Enables or disables naming support. * * @param useNaming The new use naming value */ public void setUseNaming(boolean useNaming) { this.useNaming = useNaming; } public void setAwait(boolean b) { await = b; } public boolean isAwait() { return await; } // ------------------------------------------------------ Protected Methods /** * Process the specified command line arguments. 
* * @param args Command line arguments to process * * @return <code>true</code> if we should continue processing */ protected boolean arguments(String[] args) { boolean isConfig = false; boolean isGenerateCode = false; if (args.length < 1) { usage(); return false; } for (String arg : args) { if (isConfig) { configFile = arg; isConfig = false; } else if (arg.equals("-config")) { isConfig = true; } else if (arg.equals("-generateCode")) { setGenerateCode(true); isGenerateCode = true; } else if (arg.equals("-useGeneratedCode")) { setUseGeneratedCode(true); isGenerateCode = false; } else if (arg.equals("-nonaming")) { setUseNaming(false); isGenerateCode = false; } else if (arg.equals("-help")) { usage(); return false; } else if (arg.equals("start")) { isGenerateCode = false; // NOOP } else if (arg.equals("configtest")) { isGenerateCode = false; // NOOP } else if (arg.equals("stop")) { isGenerateCode = false; // NOOP } else if (isGenerateCode) { generatedCodeLocationParameter = arg; isGenerateCode = false; } else { usage(); return false; } } return true; } /** * Return a File object representing our configuration file. * * @return the main configuration file */ protected File configFile() { File file = new File(configFile); if (!file.isAbsolute()) { file = new File(Bootstrap.getCatalinaBase(), configFile); } return file; } /** * Create and configure the Digester we will be using for startup. 
* * @return the main digester to parse server.xml */ protected Digester createStartDigester() { // Initialize the digester Digester digester = new Digester(); digester.setValidating(false); digester.setRulesValidation(true); Map<Class<?>,List<String>> fakeAttributes = new HashMap<>(); // Ignore className on all elements List<String> objectAttrs = new ArrayList<>(); objectAttrs.add("className"); fakeAttributes.put(Object.class, objectAttrs); // Ignore attribute added by Eclipse for its internal tracking List<String> contextAttrs = new ArrayList<>(); contextAttrs.add("source"); fakeAttributes.put(StandardContext.class, contextAttrs); // Ignore Connector attribute used internally but set on Server List<String> connectorAttrs = new ArrayList<>(); connectorAttrs.add("portOffset"); fakeAttributes.put(Connector.class, connectorAttrs); digester.setFakeAttributes(fakeAttributes); digester.setUseContextClassLoader(true); // Configure the actions we will be using digester.addObjectCreate("Server", "org.apache.catalina.core.StandardServer", "className"); digester.addSetProperties("Server"); digester.addSetNext("Server", "setServer", "org.apache.catalina.Server"); digester.addObjectCreate("Server/GlobalNamingResources", "org.apache.catalina.deploy.NamingResourcesImpl"); digester.addSetProperties("Server/GlobalNamingResources"); digester.addSetNext("Server/GlobalNamingResources", "setGlobalNamingResources", "org.apache.catalina.deploy.NamingResourcesImpl"); digester.addRule("Server/Listener", new ListenerCreateRule(null, "className")); digester.addSetProperties("Server/Listener"); digester.addSetNext("Server/Listener", "addLifecycleListener", "org.apache.catalina.LifecycleListener"); digester.addObjectCreate("Server/Service", "org.apache.catalina.core.StandardService", "className"); digester.addSetProperties("Server/Service"); digester.addSetNext("Server/Service", "addService", "org.apache.catalina.Service"); digester.addObjectCreate("Server/Service/Listener", null, // MUST be 
specified in the element "className"); digester.addSetProperties("Server/Service/Listener"); digester.addSetNext("Server/Service/Listener", "addLifecycleListener", "org.apache.catalina.LifecycleListener"); // Executor digester.addObjectCreate("Server/Service/Executor", "org.apache.catalina.core.StandardThreadExecutor", "className"); digester.addSetProperties("Server/Service/Executor"); digester.addSetNext("Server/Service/Executor", "addExecutor", "org.apache.catalina.Executor"); digester.addRule("Server/Service/Connector", new ConnectorCreateRule()); digester.addSetProperties("Server/Service/Connector", new String[] { "executor", "sslImplementationName", "protocol" }); digester.addSetNext("Server/Service/Connector", "addConnector", "org.apache.catalina.connector.Connector"); digester.addRule("Server/Service/Connector", new AddPortOffsetRule()); digester.addObjectCreate("Server/Service/Connector/SSLHostConfig", "org.apache.tomcat.util.net.SSLHostConfig"); digester.addSetProperties("Server/Service/Connector/SSLHostConfig"); digester.addSetNext("Server/Service/Connector/SSLHostConfig", "addSslHostConfig", "org.apache.tomcat.util.net.SSLHostConfig"); digester.addRule("Server/Service/Connector/SSLHostConfig/Certificate", new CertificateCreateRule()); digester.addSetProperties("Server/Service/Connector/SSLHostConfig/Certificate", new String[] { "type" }); digester.addSetNext("Server/Service/Connector/SSLHostConfig/Certificate", "addCertificate", "org.apache.tomcat.util.net.SSLHostConfigCertificate"); digester.addObjectCreate("Server/Service/Connector/SSLHostConfig/OpenSSLConf", "org.apache.tomcat.util.net.openssl.OpenSSLConf"); digester.addSetProperties("Server/Service/Connector/SSLHostConfig/OpenSSLConf"); digester.addSetNext("Server/Service/Connector/SSLHostConfig/OpenSSLConf", "setOpenSslConf", "org.apache.tomcat.util.net.openssl.OpenSSLConf"); digester.addObjectCreate("Server/Service/Connector/SSLHostConfig/OpenSSLConf/OpenSSLConfCmd", 
"org.apache.tomcat.util.net.openssl.OpenSSLConfCmd"); digester.addSetProperties("Server/Service/Connector/SSLHostConfig/OpenSSLConf/OpenSSLConfCmd"); digester.addSetNext("Server/Service/Connector/SSLHostConfig/OpenSSLConf/OpenSSLConfCmd", "addCmd", "org.apache.tomcat.util.net.openssl.OpenSSLConfCmd"); digester.addObjectCreate("Server/Service/Connector/Listener", null, // MUST be specified in the element "className"); digester.addSetProperties("Server/Service/Connector/Listener"); digester.addSetNext("Server/Service/Connector/Listener", "addLifecycleListener", "org.apache.catalina.LifecycleListener"); digester.addObjectCreate("Server/Service/Connector/UpgradeProtocol", null, // MUST be specified in the element "className"); digester.addSetProperties("Server/Service/Connector/UpgradeProtocol"); digester.addSetNext("Server/Service/Connector/UpgradeProtocol", "addUpgradeProtocol", "org.apache.coyote.UpgradeProtocol"); // Add RuleSets for nested elements digester.addRuleSet(new NamingRuleSet("Server/GlobalNamingResources/")); digester.addRuleSet(new EngineRuleSet("Server/Service/")); digester.addRuleSet(new HostRuleSet("Server/Service/Engine/")); digester.addRuleSet(new ContextRuleSet("Server/Service/Engine/Host/")); addClusterRuleSet(digester, "Server/Service/Engine/Host/Cluster/"); digester.addRuleSet(new NamingRuleSet("Server/Service/Engine/Host/Context/")); // When the 'engine' is found, set the parentClassLoader. digester.addRule("Server/Service/Engine", new SetParentClassLoaderRule(parentClassLoader)); addClusterRuleSet(digester, "Server/Service/Engine/Cluster/"); return digester; } /** * Cluster support is optional. The JARs may have been removed. 
*/ private void addClusterRuleSet(Digester digester, String prefix) { Class<?> clazz; Constructor<?> constructor; try { clazz = Class.forName("org.apache.catalina.ha.ClusterRuleSet"); constructor = clazz.getConstructor(String.class); RuleSet ruleSet = (RuleSet) constructor.newInstance(prefix); digester.addRuleSet(ruleSet); } catch (Exception e) { if (log.isDebugEnabled()) { log.debug(sm.getString("catalina.noCluster", e.getClass().getName() + ": " + e.getMessage()), e); } else if (log.isInfoEnabled()) { log.info(sm.getString("catalina.noCluster", e.getClass().getName() + ": " + e.getMessage())); } } } /** * Create and configure the Digester we will be using for shutdown. * * @return the digester to process the stop operation */ protected Digester createStopDigester() { // Initialize the digester Digester digester = new Digester(); digester.setUseContextClassLoader(true); // Configure the rules we need for shutting down digester.addObjectCreate("Server", "org.apache.catalina.core.StandardServer", "className"); digester.addSetProperties("Server"); digester.addSetNext("Server", "setServer", "org.apache.catalina.Server"); return digester; } protected void parseServerXml(boolean start) { // Set configuration source ConfigFileLoader .setSource(new CatalinaBaseConfigurationSource(Bootstrap.getCatalinaBaseFile(), getConfigFile())); File file = configFile(); if (useGeneratedCode && !Digester.isGeneratedCodeLoaderSet()) { // Load loader String loaderClassName = generatedCodePackage + ".DigesterGeneratedCodeLoader"; try { Digester.GeneratedCodeLoader loader = (Digester.GeneratedCodeLoader) Catalina.class.getClassLoader() .loadClass(loaderClassName).getDeclaredConstructor().newInstance(); Digester.setGeneratedCodeLoader(loader); } catch (Exception e) { if (log.isDebugEnabled()) { log.debug(sm.getString("catalina.noLoader", loaderClassName), e); } else { log.info(sm.getString("catalina.noLoader", loaderClassName)); } // No loader so don't use generated code useGeneratedCode = 
false; } } // Init source location File serverXmlLocation = null; String xmlClassName = null; if (generateCode || useGeneratedCode) { xmlClassName = start ? generatedCodePackage + ".ServerXml" : generatedCodePackage + ".ServerXmlStop"; } if (generateCode) { if (generatedCodeLocationParameter != null) { generatedCodeLocation = new File(generatedCodeLocationParameter); if (!generatedCodeLocation.isAbsolute()) { generatedCodeLocation = new File(Bootstrap.getCatalinaHomeFile(), generatedCodeLocationParameter); } } else if (generatedCodeLocation == null) { generatedCodeLocation = new File(Bootstrap.getCatalinaHomeFile(), "work"); } serverXmlLocation = new File(generatedCodeLocation, generatedCodePackage); if (!serverXmlLocation.isDirectory() && !serverXmlLocation.mkdirs()) { log.warn(sm.getString("catalina.generatedCodeLocationError", generatedCodeLocation.getAbsolutePath())); // Disable code generation generateCode = false; } } ServerXml serverXml = null; if (useGeneratedCode) { serverXml = (ServerXml) Digester.loadGeneratedClass(xmlClassName); } if (serverXml != null) { try { serverXml.load(this); } catch (Exception e) { log.warn(sm.getString("catalina.configFail", "GeneratedCode"), e); } } else { try (ConfigurationSource.Resource resource = ConfigFileLoader.getSource().getServerXml()) { // Create and execute our Digester Digester digester = start ? createStartDigester() : createStopDigester(); InputStream inputStream = resource.getInputStream(); InputSource inputSource = new InputSource(resource.getURI().toURL().toString()); inputSource.setByteStream(inputStream); digester.push(this); if (generateCode) { digester.startGeneratingCode(); generateClassHeader(digester, start); } digester.parse(inputSource); if (generateCode) { generateClassFooter(digester); try (FileWriter writer = new FileWriter( new File(serverXmlLocation, start ? 
"ServerXml.java" : "ServerXmlStop.java"))) { writer.write(digester.getGeneratedCode().toString()); } digester.endGeneratingCode(); Digester.addGeneratedClass(xmlClassName); } } catch (Exception e) { log.warn(sm.getString("catalina.configFail", file.getAbsolutePath()), e); if (file.exists() && !file.canRead()) { log.warn(sm.getString("catalina.incorrectPermissions")); } } } } public void stopServer() { stopServer(null); } public void stopServer(String[] arguments) { if (arguments != null) { arguments(arguments); } Server s = getServer(); if (s == null) { parseServerXml(false); if (getServer() == null) { log.error(sm.getString("catalina.stopError")); System.exit(1); } } else { // Server object already present. Must be running as a service try { s.stop(); s.destroy(); } catch (LifecycleException e) { log.error(sm.getString("catalina.stopError"), e); } return; } // Stop the existing server s = getServer(); if (s.getPortWithOffset() > 0) { try (Socket socket = new Socket(s.getAddress(), s.getPortWithOffset()); OutputStream stream = socket.getOutputStream()) { String shutdown = s.getShutdown(); for (int i = 0; i < shutdown.length(); i++) { stream.write(shutdown.charAt(i)); } stream.flush(); } catch (ConnectException ce) { log.error(sm.getString("catalina.stopServer.connectException", s.getAddress(), String.valueOf(s.getPortWithOffset()), String.valueOf(s.getPort()), String.valueOf(s.getPortOffset()))); log.error(sm.getString("catalina.stopError"), ce); System.exit(1); } catch (IOException ioe) { log.error(sm.getString("catalina.stopError"), ioe); System.exit(1); } } else { log.error(sm.getString("catalina.stopServer")); System.exit(1); } } /** * Start a new server instance. 
*/ public void load() { if (loaded) { return; } loaded = true; long t1 = System.nanoTime(); // Before digester - it may be needed initNaming(); // Parse main server.xml parseServerXml(true); Server s = getServer(); if (s == null) { return; } getServer().setCatalina(this); getServer().setCatalinaHome(Bootstrap.getCatalinaHomeFile()); getServer().setCatalinaBase(Bootstrap.getCatalinaBaseFile()); // Stream redirection initStreams(); // Start the new server try { getServer().init(); } catch (LifecycleException e) { if (throwOnInitFailure) { throw new Error(e); } else { log.error(sm.getString("catalina.initError"), e); } } if (log.isInfoEnabled()) { log.info(sm.getString("catalina.init", Long.toString(TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t1)))); } } /* * Load using arguments */ public void load(String[] args) { try { if (arguments(args)) { load(); } } catch (Exception e) { e.printStackTrace(System.out); } } /** * Start a new server instance. */ public void start() { if (getServer() == null) { load(); } if (getServer() == null) { log.fatal(sm.getString("catalina.noServer")); return; } long t1 = System.nanoTime(); // Start the new server try { getServer().start(); } catch (LifecycleException e) { log.fatal(sm.getString("catalina.serverStartFail"), e); try { getServer().destroy(); } catch (LifecycleException e1) { log.debug(sm.getString("catalina.destroyFail"), e1); } return; } if (log.isInfoEnabled()) { log.info(sm.getString("catalina.startup", Long.toString(TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t1)))); } if (generateCode) { // Generate loader which will load all generated classes generateLoader(); } // Register shutdown hook if (useShutdownHook) { if (shutdownHook == null) { shutdownHook = new CatalinaShutdownHook(); } Runtime.getRuntime().addShutdownHook(shutdownHook); // If JULI is being used, disable JULI's shutdown hook since // shutdown hooks run in parallel and log messages may be lost // if JULI's hook completes before the 
CatalinaShutdownHook() LogManager logManager = LogManager.getLogManager(); if (logManager instanceof ClassLoaderLogManager) { ((ClassLoaderLogManager) logManager).setUseShutdownHook(false); } } if (await) { await(); stop(); } } /** * Stop an existing server instance. */ public void stop() { try { // Remove the ShutdownHook first so that server.stop() // doesn't get invoked twice if (useShutdownHook) { Runtime.getRuntime().removeShutdownHook(shutdownHook); // If JULI is being used, re-enable JULI's shutdown to ensure // log messages are not lost LogManager logManager = LogManager.getLogManager(); if (logManager instanceof ClassLoaderLogManager) { ((ClassLoaderLogManager) logManager).setUseShutdownHook(true); } } } catch (Throwable t) { ExceptionUtils.handleThrowable(t); // This will fail on JDK 1.2. Ignoring, as Tomcat can run // fine without the shutdown hook. } // Shut down the server try { Server s = getServer(); LifecycleState state = s.getState(); if (LifecycleState.STOPPING_PREP.compareTo(state) <= 0 && LifecycleState.DESTROYED.compareTo(state) >= 0) { // Nothing to do. stop() was already called } else { s.stop(); s.destroy(); } } catch (LifecycleException e) { log.error(sm.getString("catalina.stopError"), e); } } /** * Await and shutdown. */ public void await() { getServer().await(); } /** * Print usage information for this application. 
*/ protected void usage() { System.out.println(sm.getString("catalina.usage")); } protected void initStreams() { // Replace System.out and System.err with a custom PrintStream System.setOut(new SystemLogHandler(System.out)); System.setErr(new SystemLogHandler(System.err)); } protected void initNaming() { // Setting additional variables if (!useNaming) { log.info(sm.getString("catalina.noNaming")); System.setProperty("catalina.useNaming", "false"); } else { System.setProperty("catalina.useNaming", "true"); String value = "org.apache.naming"; String oldValue = System.getProperty(javax.naming.Context.URL_PKG_PREFIXES); if (oldValue != null) { value = value + ":" + oldValue; } System.setProperty(javax.naming.Context.URL_PKG_PREFIXES, value); if (log.isDebugEnabled()) { log.debug(sm.getString("catalina.namingPrefix", value)); } value = System.getProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY); if (value == null) { System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY, "org.apache.naming.java.javaURLContextFactory"); } else { log.debug(sm.getString("catalina.initialContextFactory", value)); } } } protected void generateLoader() { String loaderClassName = "DigesterGeneratedCodeLoader"; StringBuilder code = new StringBuilder(); code.append("package ").append(generatedCodePackage).append(';').append(System.lineSeparator()); code.append("public class ").append(loaderClassName); code.append(" implements org.apache.tomcat.util.digester.Digester.GeneratedCodeLoader {") .append(System.lineSeparator()); code.append("public Object loadGeneratedCode(String className) {").append(System.lineSeparator()); code.append("switch (className) {").append(System.lineSeparator()); for (String generatedClassName : Digester.getGeneratedClasses()) { code.append("case \"").append(generatedClassName).append("\" : return new ").append(generatedClassName); code.append("();").append(System.lineSeparator()); } code.append("default: return null; }").append(System.lineSeparator()); 
code.append("}}").append(System.lineSeparator()); File loaderLocation = new File(generatedCodeLocation, generatedCodePackage); try (FileWriter writer = new FileWriter(new File(loaderLocation, loaderClassName + ".java"))) { writer.write(code.toString()); } catch (IOException ioe) { // Should not happen log.debug(sm.getString("catalina.loaderWriteFail"), ioe); } } protected void generateClassHeader(Digester digester, boolean start) { StringBuilder code = digester.getGeneratedCode(); code.append("package ").append(generatedCodePackage).append(';').append(System.lineSeparator()); code.append("public class ServerXml"); if (!start) { code.append("Stop"); } code.append(" implements "); code.append(ServerXml.class.getName().replace('$', '.')).append(" {").append(System.lineSeparator()); code.append("public void load(").append(Catalina.class.getName()); code.append(' ').append(digester.toVariableName(this)).append(") throws Exception {") .append(System.lineSeparator()); } protected void generateClassFooter(Digester digester) { StringBuilder code = digester.getGeneratedCode(); code.append('}').append(System.lineSeparator()); code.append('}').append(System.lineSeparator()); } public interface ServerXml { void load(Catalina catalina) throws Exception; } // --------------------------------------- CatalinaShutdownHook Inner Class /** * Shutdown hook which will perform a clean shutdown of Catalina if needed. 
*/ protected class CatalinaShutdownHook extends Thread { @Override public void run() { try { if (getServer() != null) { Catalina.this.stop(); } } catch (Throwable ex) { ExceptionUtils.handleThrowable(ex); log.error(sm.getString("catalina.shutdownHookFail"), ex); } finally { // If JULI is used, shut JULI down *after* the server shuts down // so log messages aren't lost LogManager logManager = LogManager.getLogManager(); if (logManager instanceof ClassLoaderLogManager) { ((ClassLoaderLogManager) logManager).shutdown(); } } } } private static final Log log = LogFactory.getLog(Catalina.class); /** * Rule that sets the parent class loader for the top object on the stack, which must be a <code>Container</code>. */ final class SetParentClassLoaderRule extends Rule { SetParentClassLoaderRule(ClassLoader parentClassLoader) { this.parentClassLoader = parentClassLoader; } ClassLoader parentClassLoader; @Override public void begin(String namespace, String name, Attributes attributes) throws Exception { if (digester.getLogger().isTraceEnabled()) { digester.getLogger().trace("Setting parent class loader"); } Container top = (Container) digester.peek(); top.setParentClassLoader(parentClassLoader); StringBuilder code = digester.getGeneratedCode(); if (code != null) { code.append(digester.toVariableName(top)).append(".setParentClassLoader("); code.append(digester.toVariableName(Catalina.this)).append(".getParentClassLoader());"); code.append(System.lineSeparator()); } } } }
googleads/google-ads-java
35,133
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/errors/ImageErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/errors/image_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.errors; /** * <pre> * Container for enum describing possible image errors. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.ImageErrorEnum} */ public final class ImageErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.errors.ImageErrorEnum) ImageErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use ImageErrorEnum.newBuilder() to construct. private ImageErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImageErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ImageErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.ImageErrorProto.internal_static_google_ads_googleads_v19_errors_ImageErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.ImageErrorProto.internal_static_google_ads_googleads_v19_errors_ImageErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.ImageErrorEnum.class, com.google.ads.googleads.v19.errors.ImageErrorEnum.Builder.class); } /** * <pre> * Enum describing possible image errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v19.errors.ImageErrorEnum.ImageError} */ public enum ImageError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. 
* </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * The image is not valid. * </pre> * * <code>INVALID_IMAGE = 2;</code> */ INVALID_IMAGE(2), /** * <pre> * The image could not be stored. * </pre> * * <code>STORAGE_ERROR = 3;</code> */ STORAGE_ERROR(3), /** * <pre> * There was a problem with the request. * </pre> * * <code>BAD_REQUEST = 4;</code> */ BAD_REQUEST(4), /** * <pre> * The image is not of legal dimensions. * </pre> * * <code>UNEXPECTED_SIZE = 5;</code> */ UNEXPECTED_SIZE(5), /** * <pre> * Animated image are not permitted. * </pre> * * <code>ANIMATED_NOT_ALLOWED = 6;</code> */ ANIMATED_NOT_ALLOWED(6), /** * <pre> * Animation is too long. * </pre> * * <code>ANIMATION_TOO_LONG = 7;</code> */ ANIMATION_TOO_LONG(7), /** * <pre> * There was an error on the server. * </pre> * * <code>SERVER_ERROR = 8;</code> */ SERVER_ERROR(8), /** * <pre> * Image cannot be in CMYK color format. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 9;</code> */ CMYK_JPEG_NOT_ALLOWED(9), /** * <pre> * Flash images are not permitted. * </pre> * * <code>FLASH_NOT_ALLOWED = 10;</code> */ FLASH_NOT_ALLOWED(10), /** * <pre> * Flash images must support clickTag. * </pre> * * <code>FLASH_WITHOUT_CLICKTAG = 11;</code> */ FLASH_WITHOUT_CLICKTAG(11), /** * <pre> * A flash error has occurred after fixing the click tag. * </pre> * * <code>FLASH_ERROR_AFTER_FIXING_CLICK_TAG = 12;</code> */ FLASH_ERROR_AFTER_FIXING_CLICK_TAG(12), /** * <pre> * Unacceptable visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 13;</code> */ ANIMATED_VISUAL_EFFECT(13), /** * <pre> * There was a problem with the flash image. * </pre> * * <code>FLASH_ERROR = 14;</code> */ FLASH_ERROR(14), /** * <pre> * Incorrect image layout. * </pre> * * <code>LAYOUT_PROBLEM = 15;</code> */ LAYOUT_PROBLEM(15), /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_IMAGE_FILE = 16;</code> */ PROBLEM_READING_IMAGE_FILE(16), /** * <pre> * There was an error storing the image. 
* </pre> * * <code>ERROR_STORING_IMAGE = 17;</code> */ ERROR_STORING_IMAGE(17), /** * <pre> * The aspect ratio of the image is not allowed. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 18;</code> */ ASPECT_RATIO_NOT_ALLOWED(18), /** * <pre> * Flash cannot have network objects. * </pre> * * <code>FLASH_HAS_NETWORK_OBJECTS = 19;</code> */ FLASH_HAS_NETWORK_OBJECTS(19), /** * <pre> * Flash cannot have network methods. * </pre> * * <code>FLASH_HAS_NETWORK_METHODS = 20;</code> */ FLASH_HAS_NETWORK_METHODS(20), /** * <pre> * Flash cannot have a Url. * </pre> * * <code>FLASH_HAS_URL = 21;</code> */ FLASH_HAS_URL(21), /** * <pre> * Flash cannot use mouse tracking. * </pre> * * <code>FLASH_HAS_MOUSE_TRACKING = 22;</code> */ FLASH_HAS_MOUSE_TRACKING(22), /** * <pre> * Flash cannot have a random number. * </pre> * * <code>FLASH_HAS_RANDOM_NUM = 23;</code> */ FLASH_HAS_RANDOM_NUM(23), /** * <pre> * Ad click target cannot be '_self'. * </pre> * * <code>FLASH_SELF_TARGETS = 24;</code> */ FLASH_SELF_TARGETS(24), /** * <pre> * GetUrl method should only use '_blank'. * </pre> * * <code>FLASH_BAD_GETURL_TARGET = 25;</code> */ FLASH_BAD_GETURL_TARGET(25), /** * <pre> * Flash version is not supported. * </pre> * * <code>FLASH_VERSION_NOT_SUPPORTED = 26;</code> */ FLASH_VERSION_NOT_SUPPORTED(26), /** * <pre> * Flash movies need to have hard coded click URL or clickTAG * </pre> * * <code>FLASH_WITHOUT_HARD_CODED_CLICK_URL = 27;</code> */ FLASH_WITHOUT_HARD_CODED_CLICK_URL(27), /** * <pre> * Uploaded flash file is corrupted. * </pre> * * <code>INVALID_FLASH_FILE = 28;</code> */ INVALID_FLASH_FILE(28), /** * <pre> * Uploaded flash file can be parsed, but the click tag can not be fixed * properly. 
* </pre> * * <code>FAILED_TO_FIX_CLICK_TAG_IN_FLASH = 29;</code> */ FAILED_TO_FIX_CLICK_TAG_IN_FLASH(29), /** * <pre> * Flash movie accesses network resources * </pre> * * <code>FLASH_ACCESSES_NETWORK_RESOURCES = 30;</code> */ FLASH_ACCESSES_NETWORK_RESOURCES(30), /** * <pre> * Flash movie attempts to call external javascript code * </pre> * * <code>FLASH_EXTERNAL_JS_CALL = 31;</code> */ FLASH_EXTERNAL_JS_CALL(31), /** * <pre> * Flash movie attempts to call flash system commands * </pre> * * <code>FLASH_EXTERNAL_FS_CALL = 32;</code> */ FLASH_EXTERNAL_FS_CALL(32), /** * <pre> * Image file is too large. * </pre> * * <code>FILE_TOO_LARGE = 33;</code> */ FILE_TOO_LARGE(33), /** * <pre> * Image data is too large. * </pre> * * <code>IMAGE_DATA_TOO_LARGE = 34;</code> */ IMAGE_DATA_TOO_LARGE(34), /** * <pre> * Error while processing the image. * </pre> * * <code>IMAGE_PROCESSING_ERROR = 35;</code> */ IMAGE_PROCESSING_ERROR(35), /** * <pre> * Image is too small. * </pre> * * <code>IMAGE_TOO_SMALL = 36;</code> */ IMAGE_TOO_SMALL(36), /** * <pre> * Input was invalid. * </pre> * * <code>INVALID_INPUT = 37;</code> */ INVALID_INPUT(37), /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_FILE = 38;</code> */ PROBLEM_READING_FILE(38), /** * <pre> * Image constraints are violated, but details like ASPECT_RATIO_NOT_ALLOWED * can't be provided. This happens when asset spec contains more than one * constraint and different criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 39;</code> */ IMAGE_CONSTRAINTS_VIOLATED(39), /** * <pre> * Image format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 40;</code> */ FORMAT_NOT_ALLOWED(40), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. 
* </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * The image is not valid. * </pre> * * <code>INVALID_IMAGE = 2;</code> */ public static final int INVALID_IMAGE_VALUE = 2; /** * <pre> * The image could not be stored. * </pre> * * <code>STORAGE_ERROR = 3;</code> */ public static final int STORAGE_ERROR_VALUE = 3; /** * <pre> * There was a problem with the request. * </pre> * * <code>BAD_REQUEST = 4;</code> */ public static final int BAD_REQUEST_VALUE = 4; /** * <pre> * The image is not of legal dimensions. * </pre> * * <code>UNEXPECTED_SIZE = 5;</code> */ public static final int UNEXPECTED_SIZE_VALUE = 5; /** * <pre> * Animated image are not permitted. * </pre> * * <code>ANIMATED_NOT_ALLOWED = 6;</code> */ public static final int ANIMATED_NOT_ALLOWED_VALUE = 6; /** * <pre> * Animation is too long. * </pre> * * <code>ANIMATION_TOO_LONG = 7;</code> */ public static final int ANIMATION_TOO_LONG_VALUE = 7; /** * <pre> * There was an error on the server. * </pre> * * <code>SERVER_ERROR = 8;</code> */ public static final int SERVER_ERROR_VALUE = 8; /** * <pre> * Image cannot be in CMYK color format. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 9;</code> */ public static final int CMYK_JPEG_NOT_ALLOWED_VALUE = 9; /** * <pre> * Flash images are not permitted. * </pre> * * <code>FLASH_NOT_ALLOWED = 10;</code> */ public static final int FLASH_NOT_ALLOWED_VALUE = 10; /** * <pre> * Flash images must support clickTag. * </pre> * * <code>FLASH_WITHOUT_CLICKTAG = 11;</code> */ public static final int FLASH_WITHOUT_CLICKTAG_VALUE = 11; /** * <pre> * A flash error has occurred after fixing the click tag. * </pre> * * <code>FLASH_ERROR_AFTER_FIXING_CLICK_TAG = 12;</code> */ public static final int FLASH_ERROR_AFTER_FIXING_CLICK_TAG_VALUE = 12; /** * <pre> * Unacceptable visual effects. 
* </pre> * * <code>ANIMATED_VISUAL_EFFECT = 13;</code> */ public static final int ANIMATED_VISUAL_EFFECT_VALUE = 13; /** * <pre> * There was a problem with the flash image. * </pre> * * <code>FLASH_ERROR = 14;</code> */ public static final int FLASH_ERROR_VALUE = 14; /** * <pre> * Incorrect image layout. * </pre> * * <code>LAYOUT_PROBLEM = 15;</code> */ public static final int LAYOUT_PROBLEM_VALUE = 15; /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_IMAGE_FILE = 16;</code> */ public static final int PROBLEM_READING_IMAGE_FILE_VALUE = 16; /** * <pre> * There was an error storing the image. * </pre> * * <code>ERROR_STORING_IMAGE = 17;</code> */ public static final int ERROR_STORING_IMAGE_VALUE = 17; /** * <pre> * The aspect ratio of the image is not allowed. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 18;</code> */ public static final int ASPECT_RATIO_NOT_ALLOWED_VALUE = 18; /** * <pre> * Flash cannot have network objects. * </pre> * * <code>FLASH_HAS_NETWORK_OBJECTS = 19;</code> */ public static final int FLASH_HAS_NETWORK_OBJECTS_VALUE = 19; /** * <pre> * Flash cannot have network methods. * </pre> * * <code>FLASH_HAS_NETWORK_METHODS = 20;</code> */ public static final int FLASH_HAS_NETWORK_METHODS_VALUE = 20; /** * <pre> * Flash cannot have a Url. * </pre> * * <code>FLASH_HAS_URL = 21;</code> */ public static final int FLASH_HAS_URL_VALUE = 21; /** * <pre> * Flash cannot use mouse tracking. * </pre> * * <code>FLASH_HAS_MOUSE_TRACKING = 22;</code> */ public static final int FLASH_HAS_MOUSE_TRACKING_VALUE = 22; /** * <pre> * Flash cannot have a random number. * </pre> * * <code>FLASH_HAS_RANDOM_NUM = 23;</code> */ public static final int FLASH_HAS_RANDOM_NUM_VALUE = 23; /** * <pre> * Ad click target cannot be '_self'. * </pre> * * <code>FLASH_SELF_TARGETS = 24;</code> */ public static final int FLASH_SELF_TARGETS_VALUE = 24; /** * <pre> * GetUrl method should only use '_blank'. 
* </pre> * * <code>FLASH_BAD_GETURL_TARGET = 25;</code> */ public static final int FLASH_BAD_GETURL_TARGET_VALUE = 25; /** * <pre> * Flash version is not supported. * </pre> * * <code>FLASH_VERSION_NOT_SUPPORTED = 26;</code> */ public static final int FLASH_VERSION_NOT_SUPPORTED_VALUE = 26; /** * <pre> * Flash movies need to have hard coded click URL or clickTAG * </pre> * * <code>FLASH_WITHOUT_HARD_CODED_CLICK_URL = 27;</code> */ public static final int FLASH_WITHOUT_HARD_CODED_CLICK_URL_VALUE = 27; /** * <pre> * Uploaded flash file is corrupted. * </pre> * * <code>INVALID_FLASH_FILE = 28;</code> */ public static final int INVALID_FLASH_FILE_VALUE = 28; /** * <pre> * Uploaded flash file can be parsed, but the click tag can not be fixed * properly. * </pre> * * <code>FAILED_TO_FIX_CLICK_TAG_IN_FLASH = 29;</code> */ public static final int FAILED_TO_FIX_CLICK_TAG_IN_FLASH_VALUE = 29; /** * <pre> * Flash movie accesses network resources * </pre> * * <code>FLASH_ACCESSES_NETWORK_RESOURCES = 30;</code> */ public static final int FLASH_ACCESSES_NETWORK_RESOURCES_VALUE = 30; /** * <pre> * Flash movie attempts to call external javascript code * </pre> * * <code>FLASH_EXTERNAL_JS_CALL = 31;</code> */ public static final int FLASH_EXTERNAL_JS_CALL_VALUE = 31; /** * <pre> * Flash movie attempts to call flash system commands * </pre> * * <code>FLASH_EXTERNAL_FS_CALL = 32;</code> */ public static final int FLASH_EXTERNAL_FS_CALL_VALUE = 32; /** * <pre> * Image file is too large. * </pre> * * <code>FILE_TOO_LARGE = 33;</code> */ public static final int FILE_TOO_LARGE_VALUE = 33; /** * <pre> * Image data is too large. * </pre> * * <code>IMAGE_DATA_TOO_LARGE = 34;</code> */ public static final int IMAGE_DATA_TOO_LARGE_VALUE = 34; /** * <pre> * Error while processing the image. * </pre> * * <code>IMAGE_PROCESSING_ERROR = 35;</code> */ public static final int IMAGE_PROCESSING_ERROR_VALUE = 35; /** * <pre> * Image is too small. 
* </pre> * * <code>IMAGE_TOO_SMALL = 36;</code> */ public static final int IMAGE_TOO_SMALL_VALUE = 36; /** * <pre> * Input was invalid. * </pre> * * <code>INVALID_INPUT = 37;</code> */ public static final int INVALID_INPUT_VALUE = 37; /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_FILE = 38;</code> */ public static final int PROBLEM_READING_FILE_VALUE = 38; /** * <pre> * Image constraints are violated, but details like ASPECT_RATIO_NOT_ALLOWED * can't be provided. This happens when asset spec contains more than one * constraint and different criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 39;</code> */ public static final int IMAGE_CONSTRAINTS_VIOLATED_VALUE = 39; /** * <pre> * Image format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 40;</code> */ public static final int FORMAT_NOT_ALLOWED_VALUE = 40; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ImageError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static ImageError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return INVALID_IMAGE; case 3: return STORAGE_ERROR; case 4: return BAD_REQUEST; case 5: return UNEXPECTED_SIZE; case 6: return ANIMATED_NOT_ALLOWED; case 7: return ANIMATION_TOO_LONG; case 8: return SERVER_ERROR; case 9: return CMYK_JPEG_NOT_ALLOWED; case 10: return FLASH_NOT_ALLOWED; case 11: return FLASH_WITHOUT_CLICKTAG; case 12: return FLASH_ERROR_AFTER_FIXING_CLICK_TAG; case 13: return ANIMATED_VISUAL_EFFECT; case 14: return FLASH_ERROR; case 15: return LAYOUT_PROBLEM; case 16: return PROBLEM_READING_IMAGE_FILE; case 17: return ERROR_STORING_IMAGE; case 18: return ASPECT_RATIO_NOT_ALLOWED; case 19: return FLASH_HAS_NETWORK_OBJECTS; case 20: return FLASH_HAS_NETWORK_METHODS; case 21: return FLASH_HAS_URL; case 22: return FLASH_HAS_MOUSE_TRACKING; case 23: return FLASH_HAS_RANDOM_NUM; case 24: return FLASH_SELF_TARGETS; case 25: return FLASH_BAD_GETURL_TARGET; case 26: return FLASH_VERSION_NOT_SUPPORTED; case 27: return FLASH_WITHOUT_HARD_CODED_CLICK_URL; case 28: return INVALID_FLASH_FILE; case 29: return FAILED_TO_FIX_CLICK_TAG_IN_FLASH; case 30: return FLASH_ACCESSES_NETWORK_RESOURCES; case 31: return FLASH_EXTERNAL_JS_CALL; case 32: return FLASH_EXTERNAL_FS_CALL; case 33: return FILE_TOO_LARGE; case 34: return IMAGE_DATA_TOO_LARGE; case 35: return IMAGE_PROCESSING_ERROR; case 36: return IMAGE_TOO_SMALL; case 37: return INVALID_INPUT; case 38: return PROBLEM_READING_FILE; case 39: return IMAGE_CONSTRAINTS_VIOLATED; case 40: return FORMAT_NOT_ALLOWED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ImageError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< ImageError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ImageError>() { public ImageError findValueByNumber(int number) { return ImageError.forNumber(number); } }; 
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v19.errors.ImageErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final ImageError[] VALUES = values(); public static ImageError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private ImageError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v19.errors.ImageErrorEnum.ImageError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.errors.ImageErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v19.errors.ImageErrorEnum other = 
(com.google.ads.googleads.v19.errors.ImageErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.errors.ImageErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible image errors. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.ImageErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.errors.ImageErrorEnum) com.google.ads.googleads.v19.errors.ImageErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.ImageErrorProto.internal_static_google_ads_googleads_v19_errors_ImageErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.ImageErrorProto.internal_static_google_ads_googleads_v19_errors_ImageErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.ImageErrorEnum.class, com.google.ads.googleads.v19.errors.ImageErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v19.errors.ImageErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.errors.ImageErrorProto.internal_static_google_ads_googleads_v19_errors_ImageErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.errors.ImageErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v19.errors.ImageErrorEnum.getDefaultInstance(); } 
@java.lang.Override public com.google.ads.googleads.v19.errors.ImageErrorEnum build() { com.google.ads.googleads.v19.errors.ImageErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.errors.ImageErrorEnum buildPartial() { com.google.ads.googleads.v19.errors.ImageErrorEnum result = new com.google.ads.googleads.v19.errors.ImageErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.errors.ImageErrorEnum) { return mergeFrom((com.google.ads.googleads.v19.errors.ImageErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.errors.ImageErrorEnum other) { if (other == com.google.ads.googleads.v19.errors.ImageErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.errors.ImageErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.errors.ImageErrorEnum) private static final com.google.ads.googleads.v19.errors.ImageErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.errors.ImageErrorEnum(); } public static com.google.ads.googleads.v19.errors.ImageErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImageErrorEnum> PARSER = new com.google.protobuf.AbstractParser<ImageErrorEnum>() { @java.lang.Override public ImageErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImageErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImageErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.errors.ImageErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,133
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/errors/ImageErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/errors/image_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.errors; /** * <pre> * Container for enum describing possible image errors. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.ImageErrorEnum} */ public final class ImageErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.errors.ImageErrorEnum) ImageErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use ImageErrorEnum.newBuilder() to construct. private ImageErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImageErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ImageErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.ImageErrorProto.internal_static_google_ads_googleads_v20_errors_ImageErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.ImageErrorProto.internal_static_google_ads_googleads_v20_errors_ImageErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.ImageErrorEnum.class, com.google.ads.googleads.v20.errors.ImageErrorEnum.Builder.class); } /** * <pre> * Enum describing possible image errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v20.errors.ImageErrorEnum.ImageError} */ public enum ImageError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. 
* </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * The image is not valid. * </pre> * * <code>INVALID_IMAGE = 2;</code> */ INVALID_IMAGE(2), /** * <pre> * The image could not be stored. * </pre> * * <code>STORAGE_ERROR = 3;</code> */ STORAGE_ERROR(3), /** * <pre> * There was a problem with the request. * </pre> * * <code>BAD_REQUEST = 4;</code> */ BAD_REQUEST(4), /** * <pre> * The image is not of legal dimensions. * </pre> * * <code>UNEXPECTED_SIZE = 5;</code> */ UNEXPECTED_SIZE(5), /** * <pre> * Animated image are not permitted. * </pre> * * <code>ANIMATED_NOT_ALLOWED = 6;</code> */ ANIMATED_NOT_ALLOWED(6), /** * <pre> * Animation is too long. * </pre> * * <code>ANIMATION_TOO_LONG = 7;</code> */ ANIMATION_TOO_LONG(7), /** * <pre> * There was an error on the server. * </pre> * * <code>SERVER_ERROR = 8;</code> */ SERVER_ERROR(8), /** * <pre> * Image cannot be in CMYK color format. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 9;</code> */ CMYK_JPEG_NOT_ALLOWED(9), /** * <pre> * Flash images are not permitted. * </pre> * * <code>FLASH_NOT_ALLOWED = 10;</code> */ FLASH_NOT_ALLOWED(10), /** * <pre> * Flash images must support clickTag. * </pre> * * <code>FLASH_WITHOUT_CLICKTAG = 11;</code> */ FLASH_WITHOUT_CLICKTAG(11), /** * <pre> * A flash error has occurred after fixing the click tag. * </pre> * * <code>FLASH_ERROR_AFTER_FIXING_CLICK_TAG = 12;</code> */ FLASH_ERROR_AFTER_FIXING_CLICK_TAG(12), /** * <pre> * Unacceptable visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 13;</code> */ ANIMATED_VISUAL_EFFECT(13), /** * <pre> * There was a problem with the flash image. * </pre> * * <code>FLASH_ERROR = 14;</code> */ FLASH_ERROR(14), /** * <pre> * Incorrect image layout. * </pre> * * <code>LAYOUT_PROBLEM = 15;</code> */ LAYOUT_PROBLEM(15), /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_IMAGE_FILE = 16;</code> */ PROBLEM_READING_IMAGE_FILE(16), /** * <pre> * There was an error storing the image. 
* </pre> * * <code>ERROR_STORING_IMAGE = 17;</code> */ ERROR_STORING_IMAGE(17), /** * <pre> * The aspect ratio of the image is not allowed. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 18;</code> */ ASPECT_RATIO_NOT_ALLOWED(18), /** * <pre> * Flash cannot have network objects. * </pre> * * <code>FLASH_HAS_NETWORK_OBJECTS = 19;</code> */ FLASH_HAS_NETWORK_OBJECTS(19), /** * <pre> * Flash cannot have network methods. * </pre> * * <code>FLASH_HAS_NETWORK_METHODS = 20;</code> */ FLASH_HAS_NETWORK_METHODS(20), /** * <pre> * Flash cannot have a Url. * </pre> * * <code>FLASH_HAS_URL = 21;</code> */ FLASH_HAS_URL(21), /** * <pre> * Flash cannot use mouse tracking. * </pre> * * <code>FLASH_HAS_MOUSE_TRACKING = 22;</code> */ FLASH_HAS_MOUSE_TRACKING(22), /** * <pre> * Flash cannot have a random number. * </pre> * * <code>FLASH_HAS_RANDOM_NUM = 23;</code> */ FLASH_HAS_RANDOM_NUM(23), /** * <pre> * Ad click target cannot be '_self'. * </pre> * * <code>FLASH_SELF_TARGETS = 24;</code> */ FLASH_SELF_TARGETS(24), /** * <pre> * GetUrl method should only use '_blank'. * </pre> * * <code>FLASH_BAD_GETURL_TARGET = 25;</code> */ FLASH_BAD_GETURL_TARGET(25), /** * <pre> * Flash version is not supported. * </pre> * * <code>FLASH_VERSION_NOT_SUPPORTED = 26;</code> */ FLASH_VERSION_NOT_SUPPORTED(26), /** * <pre> * Flash movies need to have hard coded click URL or clickTAG * </pre> * * <code>FLASH_WITHOUT_HARD_CODED_CLICK_URL = 27;</code> */ FLASH_WITHOUT_HARD_CODED_CLICK_URL(27), /** * <pre> * Uploaded flash file is corrupted. * </pre> * * <code>INVALID_FLASH_FILE = 28;</code> */ INVALID_FLASH_FILE(28), /** * <pre> * Uploaded flash file can be parsed, but the click tag can not be fixed * properly. 
* </pre> * * <code>FAILED_TO_FIX_CLICK_TAG_IN_FLASH = 29;</code> */ FAILED_TO_FIX_CLICK_TAG_IN_FLASH(29), /** * <pre> * Flash movie accesses network resources * </pre> * * <code>FLASH_ACCESSES_NETWORK_RESOURCES = 30;</code> */ FLASH_ACCESSES_NETWORK_RESOURCES(30), /** * <pre> * Flash movie attempts to call external javascript code * </pre> * * <code>FLASH_EXTERNAL_JS_CALL = 31;</code> */ FLASH_EXTERNAL_JS_CALL(31), /** * <pre> * Flash movie attempts to call flash system commands * </pre> * * <code>FLASH_EXTERNAL_FS_CALL = 32;</code> */ FLASH_EXTERNAL_FS_CALL(32), /** * <pre> * Image file is too large. * </pre> * * <code>FILE_TOO_LARGE = 33;</code> */ FILE_TOO_LARGE(33), /** * <pre> * Image data is too large. * </pre> * * <code>IMAGE_DATA_TOO_LARGE = 34;</code> */ IMAGE_DATA_TOO_LARGE(34), /** * <pre> * Error while processing the image. * </pre> * * <code>IMAGE_PROCESSING_ERROR = 35;</code> */ IMAGE_PROCESSING_ERROR(35), /** * <pre> * Image is too small. * </pre> * * <code>IMAGE_TOO_SMALL = 36;</code> */ IMAGE_TOO_SMALL(36), /** * <pre> * Input was invalid. * </pre> * * <code>INVALID_INPUT = 37;</code> */ INVALID_INPUT(37), /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_FILE = 38;</code> */ PROBLEM_READING_FILE(38), /** * <pre> * Image constraints are violated, but details like ASPECT_RATIO_NOT_ALLOWED * can't be provided. This happens when asset spec contains more than one * constraint and different criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 39;</code> */ IMAGE_CONSTRAINTS_VIOLATED(39), /** * <pre> * Image format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 40;</code> */ FORMAT_NOT_ALLOWED(40), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. 
* </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * The image is not valid. * </pre> * * <code>INVALID_IMAGE = 2;</code> */ public static final int INVALID_IMAGE_VALUE = 2; /** * <pre> * The image could not be stored. * </pre> * * <code>STORAGE_ERROR = 3;</code> */ public static final int STORAGE_ERROR_VALUE = 3; /** * <pre> * There was a problem with the request. * </pre> * * <code>BAD_REQUEST = 4;</code> */ public static final int BAD_REQUEST_VALUE = 4; /** * <pre> * The image is not of legal dimensions. * </pre> * * <code>UNEXPECTED_SIZE = 5;</code> */ public static final int UNEXPECTED_SIZE_VALUE = 5; /** * <pre> * Animated image are not permitted. * </pre> * * <code>ANIMATED_NOT_ALLOWED = 6;</code> */ public static final int ANIMATED_NOT_ALLOWED_VALUE = 6; /** * <pre> * Animation is too long. * </pre> * * <code>ANIMATION_TOO_LONG = 7;</code> */ public static final int ANIMATION_TOO_LONG_VALUE = 7; /** * <pre> * There was an error on the server. * </pre> * * <code>SERVER_ERROR = 8;</code> */ public static final int SERVER_ERROR_VALUE = 8; /** * <pre> * Image cannot be in CMYK color format. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 9;</code> */ public static final int CMYK_JPEG_NOT_ALLOWED_VALUE = 9; /** * <pre> * Flash images are not permitted. * </pre> * * <code>FLASH_NOT_ALLOWED = 10;</code> */ public static final int FLASH_NOT_ALLOWED_VALUE = 10; /** * <pre> * Flash images must support clickTag. * </pre> * * <code>FLASH_WITHOUT_CLICKTAG = 11;</code> */ public static final int FLASH_WITHOUT_CLICKTAG_VALUE = 11; /** * <pre> * A flash error has occurred after fixing the click tag. * </pre> * * <code>FLASH_ERROR_AFTER_FIXING_CLICK_TAG = 12;</code> */ public static final int FLASH_ERROR_AFTER_FIXING_CLICK_TAG_VALUE = 12; /** * <pre> * Unacceptable visual effects. 
* </pre> * * <code>ANIMATED_VISUAL_EFFECT = 13;</code> */ public static final int ANIMATED_VISUAL_EFFECT_VALUE = 13; /** * <pre> * There was a problem with the flash image. * </pre> * * <code>FLASH_ERROR = 14;</code> */ public static final int FLASH_ERROR_VALUE = 14; /** * <pre> * Incorrect image layout. * </pre> * * <code>LAYOUT_PROBLEM = 15;</code> */ public static final int LAYOUT_PROBLEM_VALUE = 15; /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_IMAGE_FILE = 16;</code> */ public static final int PROBLEM_READING_IMAGE_FILE_VALUE = 16; /** * <pre> * There was an error storing the image. * </pre> * * <code>ERROR_STORING_IMAGE = 17;</code> */ public static final int ERROR_STORING_IMAGE_VALUE = 17; /** * <pre> * The aspect ratio of the image is not allowed. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 18;</code> */ public static final int ASPECT_RATIO_NOT_ALLOWED_VALUE = 18; /** * <pre> * Flash cannot have network objects. * </pre> * * <code>FLASH_HAS_NETWORK_OBJECTS = 19;</code> */ public static final int FLASH_HAS_NETWORK_OBJECTS_VALUE = 19; /** * <pre> * Flash cannot have network methods. * </pre> * * <code>FLASH_HAS_NETWORK_METHODS = 20;</code> */ public static final int FLASH_HAS_NETWORK_METHODS_VALUE = 20; /** * <pre> * Flash cannot have a Url. * </pre> * * <code>FLASH_HAS_URL = 21;</code> */ public static final int FLASH_HAS_URL_VALUE = 21; /** * <pre> * Flash cannot use mouse tracking. * </pre> * * <code>FLASH_HAS_MOUSE_TRACKING = 22;</code> */ public static final int FLASH_HAS_MOUSE_TRACKING_VALUE = 22; /** * <pre> * Flash cannot have a random number. * </pre> * * <code>FLASH_HAS_RANDOM_NUM = 23;</code> */ public static final int FLASH_HAS_RANDOM_NUM_VALUE = 23; /** * <pre> * Ad click target cannot be '_self'. * </pre> * * <code>FLASH_SELF_TARGETS = 24;</code> */ public static final int FLASH_SELF_TARGETS_VALUE = 24; /** * <pre> * GetUrl method should only use '_blank'. 
* </pre> * * <code>FLASH_BAD_GETURL_TARGET = 25;</code> */ public static final int FLASH_BAD_GETURL_TARGET_VALUE = 25; /** * <pre> * Flash version is not supported. * </pre> * * <code>FLASH_VERSION_NOT_SUPPORTED = 26;</code> */ public static final int FLASH_VERSION_NOT_SUPPORTED_VALUE = 26; /** * <pre> * Flash movies need to have hard coded click URL or clickTAG * </pre> * * <code>FLASH_WITHOUT_HARD_CODED_CLICK_URL = 27;</code> */ public static final int FLASH_WITHOUT_HARD_CODED_CLICK_URL_VALUE = 27; /** * <pre> * Uploaded flash file is corrupted. * </pre> * * <code>INVALID_FLASH_FILE = 28;</code> */ public static final int INVALID_FLASH_FILE_VALUE = 28; /** * <pre> * Uploaded flash file can be parsed, but the click tag can not be fixed * properly. * </pre> * * <code>FAILED_TO_FIX_CLICK_TAG_IN_FLASH = 29;</code> */ public static final int FAILED_TO_FIX_CLICK_TAG_IN_FLASH_VALUE = 29; /** * <pre> * Flash movie accesses network resources * </pre> * * <code>FLASH_ACCESSES_NETWORK_RESOURCES = 30;</code> */ public static final int FLASH_ACCESSES_NETWORK_RESOURCES_VALUE = 30; /** * <pre> * Flash movie attempts to call external javascript code * </pre> * * <code>FLASH_EXTERNAL_JS_CALL = 31;</code> */ public static final int FLASH_EXTERNAL_JS_CALL_VALUE = 31; /** * <pre> * Flash movie attempts to call flash system commands * </pre> * * <code>FLASH_EXTERNAL_FS_CALL = 32;</code> */ public static final int FLASH_EXTERNAL_FS_CALL_VALUE = 32; /** * <pre> * Image file is too large. * </pre> * * <code>FILE_TOO_LARGE = 33;</code> */ public static final int FILE_TOO_LARGE_VALUE = 33; /** * <pre> * Image data is too large. * </pre> * * <code>IMAGE_DATA_TOO_LARGE = 34;</code> */ public static final int IMAGE_DATA_TOO_LARGE_VALUE = 34; /** * <pre> * Error while processing the image. * </pre> * * <code>IMAGE_PROCESSING_ERROR = 35;</code> */ public static final int IMAGE_PROCESSING_ERROR_VALUE = 35; /** * <pre> * Image is too small. 
* </pre> * * <code>IMAGE_TOO_SMALL = 36;</code> */ public static final int IMAGE_TOO_SMALL_VALUE = 36; /** * <pre> * Input was invalid. * </pre> * * <code>INVALID_INPUT = 37;</code> */ public static final int INVALID_INPUT_VALUE = 37; /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_FILE = 38;</code> */ public static final int PROBLEM_READING_FILE_VALUE = 38; /** * <pre> * Image constraints are violated, but details like ASPECT_RATIO_NOT_ALLOWED * can't be provided. This happens when asset spec contains more than one * constraint and different criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 39;</code> */ public static final int IMAGE_CONSTRAINTS_VIOLATED_VALUE = 39; /** * <pre> * Image format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 40;</code> */ public static final int FORMAT_NOT_ALLOWED_VALUE = 40; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ImageError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static ImageError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return INVALID_IMAGE; case 3: return STORAGE_ERROR; case 4: return BAD_REQUEST; case 5: return UNEXPECTED_SIZE; case 6: return ANIMATED_NOT_ALLOWED; case 7: return ANIMATION_TOO_LONG; case 8: return SERVER_ERROR; case 9: return CMYK_JPEG_NOT_ALLOWED; case 10: return FLASH_NOT_ALLOWED; case 11: return FLASH_WITHOUT_CLICKTAG; case 12: return FLASH_ERROR_AFTER_FIXING_CLICK_TAG; case 13: return ANIMATED_VISUAL_EFFECT; case 14: return FLASH_ERROR; case 15: return LAYOUT_PROBLEM; case 16: return PROBLEM_READING_IMAGE_FILE; case 17: return ERROR_STORING_IMAGE; case 18: return ASPECT_RATIO_NOT_ALLOWED; case 19: return FLASH_HAS_NETWORK_OBJECTS; case 20: return FLASH_HAS_NETWORK_METHODS; case 21: return FLASH_HAS_URL; case 22: return FLASH_HAS_MOUSE_TRACKING; case 23: return FLASH_HAS_RANDOM_NUM; case 24: return FLASH_SELF_TARGETS; case 25: return FLASH_BAD_GETURL_TARGET; case 26: return FLASH_VERSION_NOT_SUPPORTED; case 27: return FLASH_WITHOUT_HARD_CODED_CLICK_URL; case 28: return INVALID_FLASH_FILE; case 29: return FAILED_TO_FIX_CLICK_TAG_IN_FLASH; case 30: return FLASH_ACCESSES_NETWORK_RESOURCES; case 31: return FLASH_EXTERNAL_JS_CALL; case 32: return FLASH_EXTERNAL_FS_CALL; case 33: return FILE_TOO_LARGE; case 34: return IMAGE_DATA_TOO_LARGE; case 35: return IMAGE_PROCESSING_ERROR; case 36: return IMAGE_TOO_SMALL; case 37: return INVALID_INPUT; case 38: return PROBLEM_READING_FILE; case 39: return IMAGE_CONSTRAINTS_VIOLATED; case 40: return FORMAT_NOT_ALLOWED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ImageError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< ImageError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ImageError>() { public ImageError findValueByNumber(int number) { return ImageError.forNumber(number); } }; 
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v20.errors.ImageErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final ImageError[] VALUES = values(); public static ImageError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private ImageError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v20.errors.ImageErrorEnum.ImageError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.errors.ImageErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v20.errors.ImageErrorEnum other = 
(com.google.ads.googleads.v20.errors.ImageErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.errors.ImageErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible image errors. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.ImageErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.errors.ImageErrorEnum) com.google.ads.googleads.v20.errors.ImageErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.ImageErrorProto.internal_static_google_ads_googleads_v20_errors_ImageErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.ImageErrorProto.internal_static_google_ads_googleads_v20_errors_ImageErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.ImageErrorEnum.class, com.google.ads.googleads.v20.errors.ImageErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v20.errors.ImageErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.errors.ImageErrorProto.internal_static_google_ads_googleads_v20_errors_ImageErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.errors.ImageErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v20.errors.ImageErrorEnum.getDefaultInstance(); } 
@java.lang.Override public com.google.ads.googleads.v20.errors.ImageErrorEnum build() { com.google.ads.googleads.v20.errors.ImageErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.errors.ImageErrorEnum buildPartial() { com.google.ads.googleads.v20.errors.ImageErrorEnum result = new com.google.ads.googleads.v20.errors.ImageErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.errors.ImageErrorEnum) { return mergeFrom((com.google.ads.googleads.v20.errors.ImageErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.errors.ImageErrorEnum other) { if (other == com.google.ads.googleads.v20.errors.ImageErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.errors.ImageErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.errors.ImageErrorEnum) private static final com.google.ads.googleads.v20.errors.ImageErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.errors.ImageErrorEnum(); } public static com.google.ads.googleads.v20.errors.ImageErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImageErrorEnum> PARSER = new com.google.protobuf.AbstractParser<ImageErrorEnum>() { @java.lang.Override public ImageErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImageErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImageErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.errors.ImageErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,133
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/errors/ImageErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/errors/image_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.errors; /** * <pre> * Container for enum describing possible image errors. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.ImageErrorEnum} */ public final class ImageErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.errors.ImageErrorEnum) ImageErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use ImageErrorEnum.newBuilder() to construct. private ImageErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImageErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ImageErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ImageErrorProto.internal_static_google_ads_googleads_v21_errors_ImageErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.ImageErrorProto.internal_static_google_ads_googleads_v21_errors_ImageErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.ImageErrorEnum.class, com.google.ads.googleads.v21.errors.ImageErrorEnum.Builder.class); } /** * <pre> * Enum describing possible image errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v21.errors.ImageErrorEnum.ImageError} */ public enum ImageError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. 
* </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * The image is not valid. * </pre> * * <code>INVALID_IMAGE = 2;</code> */ INVALID_IMAGE(2), /** * <pre> * The image could not be stored. * </pre> * * <code>STORAGE_ERROR = 3;</code> */ STORAGE_ERROR(3), /** * <pre> * There was a problem with the request. * </pre> * * <code>BAD_REQUEST = 4;</code> */ BAD_REQUEST(4), /** * <pre> * The image is not of legal dimensions. * </pre> * * <code>UNEXPECTED_SIZE = 5;</code> */ UNEXPECTED_SIZE(5), /** * <pre> * Animated image are not permitted. * </pre> * * <code>ANIMATED_NOT_ALLOWED = 6;</code> */ ANIMATED_NOT_ALLOWED(6), /** * <pre> * Animation is too long. * </pre> * * <code>ANIMATION_TOO_LONG = 7;</code> */ ANIMATION_TOO_LONG(7), /** * <pre> * There was an error on the server. * </pre> * * <code>SERVER_ERROR = 8;</code> */ SERVER_ERROR(8), /** * <pre> * Image cannot be in CMYK color format. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 9;</code> */ CMYK_JPEG_NOT_ALLOWED(9), /** * <pre> * Flash images are not permitted. * </pre> * * <code>FLASH_NOT_ALLOWED = 10;</code> */ FLASH_NOT_ALLOWED(10), /** * <pre> * Flash images must support clickTag. * </pre> * * <code>FLASH_WITHOUT_CLICKTAG = 11;</code> */ FLASH_WITHOUT_CLICKTAG(11), /** * <pre> * A flash error has occurred after fixing the click tag. * </pre> * * <code>FLASH_ERROR_AFTER_FIXING_CLICK_TAG = 12;</code> */ FLASH_ERROR_AFTER_FIXING_CLICK_TAG(12), /** * <pre> * Unacceptable visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 13;</code> */ ANIMATED_VISUAL_EFFECT(13), /** * <pre> * There was a problem with the flash image. * </pre> * * <code>FLASH_ERROR = 14;</code> */ FLASH_ERROR(14), /** * <pre> * Incorrect image layout. * </pre> * * <code>LAYOUT_PROBLEM = 15;</code> */ LAYOUT_PROBLEM(15), /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_IMAGE_FILE = 16;</code> */ PROBLEM_READING_IMAGE_FILE(16), /** * <pre> * There was an error storing the image. 
* </pre> * * <code>ERROR_STORING_IMAGE = 17;</code> */ ERROR_STORING_IMAGE(17), /** * <pre> * The aspect ratio of the image is not allowed. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 18;</code> */ ASPECT_RATIO_NOT_ALLOWED(18), /** * <pre> * Flash cannot have network objects. * </pre> * * <code>FLASH_HAS_NETWORK_OBJECTS = 19;</code> */ FLASH_HAS_NETWORK_OBJECTS(19), /** * <pre> * Flash cannot have network methods. * </pre> * * <code>FLASH_HAS_NETWORK_METHODS = 20;</code> */ FLASH_HAS_NETWORK_METHODS(20), /** * <pre> * Flash cannot have a Url. * </pre> * * <code>FLASH_HAS_URL = 21;</code> */ FLASH_HAS_URL(21), /** * <pre> * Flash cannot use mouse tracking. * </pre> * * <code>FLASH_HAS_MOUSE_TRACKING = 22;</code> */ FLASH_HAS_MOUSE_TRACKING(22), /** * <pre> * Flash cannot have a random number. * </pre> * * <code>FLASH_HAS_RANDOM_NUM = 23;</code> */ FLASH_HAS_RANDOM_NUM(23), /** * <pre> * Ad click target cannot be '_self'. * </pre> * * <code>FLASH_SELF_TARGETS = 24;</code> */ FLASH_SELF_TARGETS(24), /** * <pre> * GetUrl method should only use '_blank'. * </pre> * * <code>FLASH_BAD_GETURL_TARGET = 25;</code> */ FLASH_BAD_GETURL_TARGET(25), /** * <pre> * Flash version is not supported. * </pre> * * <code>FLASH_VERSION_NOT_SUPPORTED = 26;</code> */ FLASH_VERSION_NOT_SUPPORTED(26), /** * <pre> * Flash movies need to have hard coded click URL or clickTAG * </pre> * * <code>FLASH_WITHOUT_HARD_CODED_CLICK_URL = 27;</code> */ FLASH_WITHOUT_HARD_CODED_CLICK_URL(27), /** * <pre> * Uploaded flash file is corrupted. * </pre> * * <code>INVALID_FLASH_FILE = 28;</code> */ INVALID_FLASH_FILE(28), /** * <pre> * Uploaded flash file can be parsed, but the click tag can not be fixed * properly. 
* </pre> * * <code>FAILED_TO_FIX_CLICK_TAG_IN_FLASH = 29;</code> */ FAILED_TO_FIX_CLICK_TAG_IN_FLASH(29), /** * <pre> * Flash movie accesses network resources * </pre> * * <code>FLASH_ACCESSES_NETWORK_RESOURCES = 30;</code> */ FLASH_ACCESSES_NETWORK_RESOURCES(30), /** * <pre> * Flash movie attempts to call external javascript code * </pre> * * <code>FLASH_EXTERNAL_JS_CALL = 31;</code> */ FLASH_EXTERNAL_JS_CALL(31), /** * <pre> * Flash movie attempts to call flash system commands * </pre> * * <code>FLASH_EXTERNAL_FS_CALL = 32;</code> */ FLASH_EXTERNAL_FS_CALL(32), /** * <pre> * Image file is too large. * </pre> * * <code>FILE_TOO_LARGE = 33;</code> */ FILE_TOO_LARGE(33), /** * <pre> * Image data is too large. * </pre> * * <code>IMAGE_DATA_TOO_LARGE = 34;</code> */ IMAGE_DATA_TOO_LARGE(34), /** * <pre> * Error while processing the image. * </pre> * * <code>IMAGE_PROCESSING_ERROR = 35;</code> */ IMAGE_PROCESSING_ERROR(35), /** * <pre> * Image is too small. * </pre> * * <code>IMAGE_TOO_SMALL = 36;</code> */ IMAGE_TOO_SMALL(36), /** * <pre> * Input was invalid. * </pre> * * <code>INVALID_INPUT = 37;</code> */ INVALID_INPUT(37), /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_FILE = 38;</code> */ PROBLEM_READING_FILE(38), /** * <pre> * Image constraints are violated, but details like ASPECT_RATIO_NOT_ALLOWED * can't be provided. This happens when asset spec contains more than one * constraint and different criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 39;</code> */ IMAGE_CONSTRAINTS_VIOLATED(39), /** * <pre> * Image format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 40;</code> */ FORMAT_NOT_ALLOWED(40), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. 
* </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * The image is not valid. * </pre> * * <code>INVALID_IMAGE = 2;</code> */ public static final int INVALID_IMAGE_VALUE = 2; /** * <pre> * The image could not be stored. * </pre> * * <code>STORAGE_ERROR = 3;</code> */ public static final int STORAGE_ERROR_VALUE = 3; /** * <pre> * There was a problem with the request. * </pre> * * <code>BAD_REQUEST = 4;</code> */ public static final int BAD_REQUEST_VALUE = 4; /** * <pre> * The image is not of legal dimensions. * </pre> * * <code>UNEXPECTED_SIZE = 5;</code> */ public static final int UNEXPECTED_SIZE_VALUE = 5; /** * <pre> * Animated image are not permitted. * </pre> * * <code>ANIMATED_NOT_ALLOWED = 6;</code> */ public static final int ANIMATED_NOT_ALLOWED_VALUE = 6; /** * <pre> * Animation is too long. * </pre> * * <code>ANIMATION_TOO_LONG = 7;</code> */ public static final int ANIMATION_TOO_LONG_VALUE = 7; /** * <pre> * There was an error on the server. * </pre> * * <code>SERVER_ERROR = 8;</code> */ public static final int SERVER_ERROR_VALUE = 8; /** * <pre> * Image cannot be in CMYK color format. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 9;</code> */ public static final int CMYK_JPEG_NOT_ALLOWED_VALUE = 9; /** * <pre> * Flash images are not permitted. * </pre> * * <code>FLASH_NOT_ALLOWED = 10;</code> */ public static final int FLASH_NOT_ALLOWED_VALUE = 10; /** * <pre> * Flash images must support clickTag. * </pre> * * <code>FLASH_WITHOUT_CLICKTAG = 11;</code> */ public static final int FLASH_WITHOUT_CLICKTAG_VALUE = 11; /** * <pre> * A flash error has occurred after fixing the click tag. * </pre> * * <code>FLASH_ERROR_AFTER_FIXING_CLICK_TAG = 12;</code> */ public static final int FLASH_ERROR_AFTER_FIXING_CLICK_TAG_VALUE = 12; /** * <pre> * Unacceptable visual effects. 
* </pre> * * <code>ANIMATED_VISUAL_EFFECT = 13;</code> */ public static final int ANIMATED_VISUAL_EFFECT_VALUE = 13; /** * <pre> * There was a problem with the flash image. * </pre> * * <code>FLASH_ERROR = 14;</code> */ public static final int FLASH_ERROR_VALUE = 14; /** * <pre> * Incorrect image layout. * </pre> * * <code>LAYOUT_PROBLEM = 15;</code> */ public static final int LAYOUT_PROBLEM_VALUE = 15; /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_IMAGE_FILE = 16;</code> */ public static final int PROBLEM_READING_IMAGE_FILE_VALUE = 16; /** * <pre> * There was an error storing the image. * </pre> * * <code>ERROR_STORING_IMAGE = 17;</code> */ public static final int ERROR_STORING_IMAGE_VALUE = 17; /** * <pre> * The aspect ratio of the image is not allowed. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 18;</code> */ public static final int ASPECT_RATIO_NOT_ALLOWED_VALUE = 18; /** * <pre> * Flash cannot have network objects. * </pre> * * <code>FLASH_HAS_NETWORK_OBJECTS = 19;</code> */ public static final int FLASH_HAS_NETWORK_OBJECTS_VALUE = 19; /** * <pre> * Flash cannot have network methods. * </pre> * * <code>FLASH_HAS_NETWORK_METHODS = 20;</code> */ public static final int FLASH_HAS_NETWORK_METHODS_VALUE = 20; /** * <pre> * Flash cannot have a Url. * </pre> * * <code>FLASH_HAS_URL = 21;</code> */ public static final int FLASH_HAS_URL_VALUE = 21; /** * <pre> * Flash cannot use mouse tracking. * </pre> * * <code>FLASH_HAS_MOUSE_TRACKING = 22;</code> */ public static final int FLASH_HAS_MOUSE_TRACKING_VALUE = 22; /** * <pre> * Flash cannot have a random number. * </pre> * * <code>FLASH_HAS_RANDOM_NUM = 23;</code> */ public static final int FLASH_HAS_RANDOM_NUM_VALUE = 23; /** * <pre> * Ad click target cannot be '_self'. * </pre> * * <code>FLASH_SELF_TARGETS = 24;</code> */ public static final int FLASH_SELF_TARGETS_VALUE = 24; /** * <pre> * GetUrl method should only use '_blank'. 
* </pre> * * <code>FLASH_BAD_GETURL_TARGET = 25;</code> */ public static final int FLASH_BAD_GETURL_TARGET_VALUE = 25; /** * <pre> * Flash version is not supported. * </pre> * * <code>FLASH_VERSION_NOT_SUPPORTED = 26;</code> */ public static final int FLASH_VERSION_NOT_SUPPORTED_VALUE = 26; /** * <pre> * Flash movies need to have hard coded click URL or clickTAG * </pre> * * <code>FLASH_WITHOUT_HARD_CODED_CLICK_URL = 27;</code> */ public static final int FLASH_WITHOUT_HARD_CODED_CLICK_URL_VALUE = 27; /** * <pre> * Uploaded flash file is corrupted. * </pre> * * <code>INVALID_FLASH_FILE = 28;</code> */ public static final int INVALID_FLASH_FILE_VALUE = 28; /** * <pre> * Uploaded flash file can be parsed, but the click tag can not be fixed * properly. * </pre> * * <code>FAILED_TO_FIX_CLICK_TAG_IN_FLASH = 29;</code> */ public static final int FAILED_TO_FIX_CLICK_TAG_IN_FLASH_VALUE = 29; /** * <pre> * Flash movie accesses network resources * </pre> * * <code>FLASH_ACCESSES_NETWORK_RESOURCES = 30;</code> */ public static final int FLASH_ACCESSES_NETWORK_RESOURCES_VALUE = 30; /** * <pre> * Flash movie attempts to call external javascript code * </pre> * * <code>FLASH_EXTERNAL_JS_CALL = 31;</code> */ public static final int FLASH_EXTERNAL_JS_CALL_VALUE = 31; /** * <pre> * Flash movie attempts to call flash system commands * </pre> * * <code>FLASH_EXTERNAL_FS_CALL = 32;</code> */ public static final int FLASH_EXTERNAL_FS_CALL_VALUE = 32; /** * <pre> * Image file is too large. * </pre> * * <code>FILE_TOO_LARGE = 33;</code> */ public static final int FILE_TOO_LARGE_VALUE = 33; /** * <pre> * Image data is too large. * </pre> * * <code>IMAGE_DATA_TOO_LARGE = 34;</code> */ public static final int IMAGE_DATA_TOO_LARGE_VALUE = 34; /** * <pre> * Error while processing the image. * </pre> * * <code>IMAGE_PROCESSING_ERROR = 35;</code> */ public static final int IMAGE_PROCESSING_ERROR_VALUE = 35; /** * <pre> * Image is too small. 
* </pre> * * <code>IMAGE_TOO_SMALL = 36;</code> */ public static final int IMAGE_TOO_SMALL_VALUE = 36; /** * <pre> * Input was invalid. * </pre> * * <code>INVALID_INPUT = 37;</code> */ public static final int INVALID_INPUT_VALUE = 37; /** * <pre> * There was a problem reading the image file. * </pre> * * <code>PROBLEM_READING_FILE = 38;</code> */ public static final int PROBLEM_READING_FILE_VALUE = 38; /** * <pre> * Image constraints are violated, but details like ASPECT_RATIO_NOT_ALLOWED * can't be provided. This happens when asset spec contains more than one * constraint and different criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 39;</code> */ public static final int IMAGE_CONSTRAINTS_VIOLATED_VALUE = 39; /** * <pre> * Image format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 40;</code> */ public static final int FORMAT_NOT_ALLOWED_VALUE = 40; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ImageError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static ImageError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return INVALID_IMAGE; case 3: return STORAGE_ERROR; case 4: return BAD_REQUEST; case 5: return UNEXPECTED_SIZE; case 6: return ANIMATED_NOT_ALLOWED; case 7: return ANIMATION_TOO_LONG; case 8: return SERVER_ERROR; case 9: return CMYK_JPEG_NOT_ALLOWED; case 10: return FLASH_NOT_ALLOWED; case 11: return FLASH_WITHOUT_CLICKTAG; case 12: return FLASH_ERROR_AFTER_FIXING_CLICK_TAG; case 13: return ANIMATED_VISUAL_EFFECT; case 14: return FLASH_ERROR; case 15: return LAYOUT_PROBLEM; case 16: return PROBLEM_READING_IMAGE_FILE; case 17: return ERROR_STORING_IMAGE; case 18: return ASPECT_RATIO_NOT_ALLOWED; case 19: return FLASH_HAS_NETWORK_OBJECTS; case 20: return FLASH_HAS_NETWORK_METHODS; case 21: return FLASH_HAS_URL; case 22: return FLASH_HAS_MOUSE_TRACKING; case 23: return FLASH_HAS_RANDOM_NUM; case 24: return FLASH_SELF_TARGETS; case 25: return FLASH_BAD_GETURL_TARGET; case 26: return FLASH_VERSION_NOT_SUPPORTED; case 27: return FLASH_WITHOUT_HARD_CODED_CLICK_URL; case 28: return INVALID_FLASH_FILE; case 29: return FAILED_TO_FIX_CLICK_TAG_IN_FLASH; case 30: return FLASH_ACCESSES_NETWORK_RESOURCES; case 31: return FLASH_EXTERNAL_JS_CALL; case 32: return FLASH_EXTERNAL_FS_CALL; case 33: return FILE_TOO_LARGE; case 34: return IMAGE_DATA_TOO_LARGE; case 35: return IMAGE_PROCESSING_ERROR; case 36: return IMAGE_TOO_SMALL; case 37: return INVALID_INPUT; case 38: return PROBLEM_READING_FILE; case 39: return IMAGE_CONSTRAINTS_VIOLATED; case 40: return FORMAT_NOT_ALLOWED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ImageError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< ImageError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ImageError>() { public ImageError findValueByNumber(int number) { return ImageError.forNumber(number); } }; 
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ImageErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final ImageError[] VALUES = values(); public static ImageError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private ImageError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v21.errors.ImageErrorEnum.ImageError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.errors.ImageErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v21.errors.ImageErrorEnum other = 
(com.google.ads.googleads.v21.errors.ImageErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.errors.ImageErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible image errors. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.ImageErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.errors.ImageErrorEnum) com.google.ads.googleads.v21.errors.ImageErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ImageErrorProto.internal_static_google_ads_googleads_v21_errors_ImageErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.ImageErrorProto.internal_static_google_ads_googleads_v21_errors_ImageErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.ImageErrorEnum.class, com.google.ads.googleads.v21.errors.ImageErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v21.errors.ImageErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.errors.ImageErrorProto.internal_static_google_ads_googleads_v21_errors_ImageErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.errors.ImageErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v21.errors.ImageErrorEnum.getDefaultInstance(); } 
@java.lang.Override public com.google.ads.googleads.v21.errors.ImageErrorEnum build() { com.google.ads.googleads.v21.errors.ImageErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.errors.ImageErrorEnum buildPartial() { com.google.ads.googleads.v21.errors.ImageErrorEnum result = new com.google.ads.googleads.v21.errors.ImageErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.errors.ImageErrorEnum) { return mergeFrom((com.google.ads.googleads.v21.errors.ImageErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.errors.ImageErrorEnum other) { if (other == com.google.ads.googleads.v21.errors.ImageErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.errors.ImageErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.errors.ImageErrorEnum) private static final com.google.ads.googleads.v21.errors.ImageErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.errors.ImageErrorEnum(); } public static com.google.ads.googleads.v21.errors.ImageErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImageErrorEnum> PARSER = new com.google.protobuf.AbstractParser<ImageErrorEnum>() { @java.lang.Override public ImageErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImageErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImageErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.errors.ImageErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,190
java-bigquerydatapolicy/proto-google-cloud-bigquerydatapolicy-v2/src/main/java/com/google/cloud/bigquery/datapolicies/v2/AddGranteesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/datapolicies/v2/datapolicy.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.datapolicies.v2; /** * * * <pre> * Request message for the AddGrantees method. * </pre> * * Protobuf type {@code google.cloud.bigquery.datapolicies.v2.AddGranteesRequest} */ public final class AddGranteesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.datapolicies.v2.AddGranteesRequest) AddGranteesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use AddGranteesRequest.newBuilder() to construct. 
private AddGranteesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AddGranteesRequest() { dataPolicy_ = ""; grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AddGranteesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.datapolicies.v2.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2_AddGranteesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.datapolicies.v2.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2_AddGranteesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest.class, com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest.Builder.class); } public static final int DATA_POLICY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object dataPolicy_ = ""; /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The dataPolicy. */ @java.lang.Override public java.lang.String getDataPolicy() { java.lang.Object ref = dataPolicy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); dataPolicy_ = s; return s; } } /** * * * <pre> * Required. 
Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for dataPolicy. */ @java.lang.Override public com.google.protobuf.ByteString getDataPolicyBytes() { java.lang.Object ref = dataPolicy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); dataPolicy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GRANTEES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return A list containing the grantees. */ public com.google.protobuf.ProtocolStringList getGranteesList() { return grantees_; } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The count of grantees. 
*/ public int getGranteesCount() { return grantees_.size(); } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the element to return. * @return The grantees at the given index. */ public java.lang.String getGrantees(int index) { return grantees_.get(index); } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the value to return. * @return The bytes of the grantees at the given index. 
*/ public com.google.protobuf.ByteString getGranteesBytes(int index) { return grantees_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataPolicy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, dataPolicy_); } for (int i = 0; i < grantees_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, grantees_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataPolicy_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, dataPolicy_); } { int dataSize = 0; for (int i = 0; i < grantees_.size(); i++) { dataSize += computeStringSizeNoTag(grantees_.getRaw(i)); } size += dataSize; size += 1 * getGranteesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest)) { return super.equals(obj); } com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest other = (com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest) obj; if (!getDataPolicy().equals(other.getDataPolicy())) return false; if (!getGranteesList().equals(other.getGranteesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 
0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DATA_POLICY_FIELD_NUMBER; hash = (53 * hash) + getDataPolicy().hashCode(); if (getGranteesCount() > 0) { hash = (37 * hash) + GRANTEES_FIELD_NUMBER; hash = (53 * hash) + getGranteesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the AddGrantees method. * </pre> * * Protobuf type {@code google.cloud.bigquery.datapolicies.v2.AddGranteesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.datapolicies.v2.AddGranteesRequest) com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.datapolicies.v2.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2_AddGranteesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.datapolicies.v2.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2_AddGranteesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest.class, com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest.Builder.class); } // Construct using com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; dataPolicy_ = ""; grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.datapolicies.v2.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2_AddGranteesRequest_descriptor; } 
@java.lang.Override public com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest getDefaultInstanceForType() { return com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest build() { com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest buildPartial() { com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest result = new com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.dataPolicy_ = dataPolicy_; } if (((from_bitField0_ & 0x00000002) != 0)) { grantees_.makeImmutable(); result.grantees_ = grantees_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest) { return mergeFrom((com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest other) { if (other == com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest.getDefaultInstance()) return this; if (!other.getDataPolicy().isEmpty()) { dataPolicy_ = other.dataPolicy_; bitField0_ |= 0x00000001; onChanged(); } if (!other.grantees_.isEmpty()) { if (grantees_.isEmpty()) { grantees_ = other.grantees_; bitField0_ |= 0x00000002; } else { ensureGranteesIsMutable(); grantees_.addAll(other.grantees_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { dataPolicy_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); ensureGranteesIsMutable(); grantees_.add(s); break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object 
dataPolicy_ = ""; /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The dataPolicy. */ public java.lang.String getDataPolicy() { java.lang.Object ref = dataPolicy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); dataPolicy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for dataPolicy. */ public com.google.protobuf.ByteString getDataPolicyBytes() { java.lang.Object ref = dataPolicy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); dataPolicy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The dataPolicy to set. * @return This builder for chaining. */ public Builder setDataPolicy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } dataPolicy_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearDataPolicy() { dataPolicy_ = getDefaultInstance().getDataPolicy(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for dataPolicy to set. * @return This builder for chaining. */ public Builder setDataPolicyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); dataPolicy_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureGranteesIsMutable() { if (!grantees_.isModifiable()) { grantees_ = new com.google.protobuf.LazyStringArrayList(grantees_); } bitField0_ |= 0x00000002; } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return A list containing the grantees. 
*/ public com.google.protobuf.ProtocolStringList getGranteesList() { grantees_.makeImmutable(); return grantees_; } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The count of grantees. */ public int getGranteesCount() { return grantees_.size(); } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the element to return. * @return The grantees at the given index. */ public java.lang.String getGrantees(int index) { return grantees_.get(index); } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the value to return. * @return The bytes of the grantees at the given index. 
*/ public com.google.protobuf.ByteString getGranteesBytes(int index) { return grantees_.getByteString(index); } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index to set the value at. * @param value The grantees to set. * @return This builder for chaining. */ public Builder setGrantees(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureGranteesIsMutable(); grantees_.set(index, value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The grantees to add. * @return This builder for chaining. */ public Builder addGrantees(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureGranteesIsMutable(); grantees_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. 
* * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param values The grantees to add. * @return This builder for chaining. */ public Builder addAllGrantees(java.lang.Iterable<java.lang.String> values) { ensureGranteesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, grantees_); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearGrantees() { grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); ; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be granted Fine Grained Access to the * underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes of the grantees to add. * @return This builder for chaining. 
*/ public Builder addGranteesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureGranteesIsMutable(); grantees_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.datapolicies.v2.AddGranteesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.datapolicies.v2.AddGranteesRequest) private static final com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest(); } public static com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AddGranteesRequest> PARSER = new com.google.protobuf.AbstractParser<AddGranteesRequest>() { @java.lang.Override public AddGranteesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } 
return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AddGranteesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AddGranteesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.datapolicies.v2.AddGranteesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,201
java-batch/proto-google-cloud-batch-v1/src/main/java/com/google/cloud/batch/v1/DeleteJobRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/batch/v1/batch.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.batch.v1; /** * * * <pre> * DeleteJob Request. * </pre> * * Protobuf type {@code google.cloud.batch.v1.DeleteJobRequest} */ public final class DeleteJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.batch.v1.DeleteJobRequest) DeleteJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeleteJobRequest.newBuilder() to construct. 
private DeleteJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeleteJobRequest() { name_ = ""; reason_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeleteJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.batch.v1.BatchProto .internal_static_google_cloud_batch_v1_DeleteJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.batch.v1.BatchProto .internal_static_google_cloud_batch_v1_DeleteJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.batch.v1.DeleteJobRequest.class, com.google.cloud.batch.v1.DeleteJobRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REASON_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object reason_ = ""; /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The reason. */ @java.lang.Override public java.lang.String getReason() { java.lang.Object ref = reason_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); reason_ = s; return s; } } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for reason. */ @java.lang.Override public com.google.protobuf.ByteString getReasonBytes() { java.lang.Object ref = reason_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); reason_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. 
If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(reason_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, reason_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, requestId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(reason_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, reason_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, requestId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.batch.v1.DeleteJobRequest)) { return super.equals(obj); } com.google.cloud.batch.v1.DeleteJobRequest other = (com.google.cloud.batch.v1.DeleteJobRequest) obj; if (!getName().equals(other.getName())) return false; if (!getReason().equals(other.getReason())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REASON_FIELD_NUMBER; hash = (53 * hash) + getReason().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.batch.v1.DeleteJobRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.batch.v1.DeleteJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.batch.v1.DeleteJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static 
Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.batch.v1.DeleteJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * DeleteJob Request. * </pre> * * Protobuf type {@code google.cloud.batch.v1.DeleteJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.batch.v1.DeleteJobRequest) com.google.cloud.batch.v1.DeleteJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.batch.v1.BatchProto .internal_static_google_cloud_batch_v1_DeleteJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.batch.v1.BatchProto .internal_static_google_cloud_batch_v1_DeleteJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.batch.v1.DeleteJobRequest.class, com.google.cloud.batch.v1.DeleteJobRequest.Builder.class); } // Construct using com.google.cloud.batch.v1.DeleteJobRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; reason_ = ""; requestId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.batch.v1.BatchProto .internal_static_google_cloud_batch_v1_DeleteJobRequest_descriptor; } 
@java.lang.Override public com.google.cloud.batch.v1.DeleteJobRequest getDefaultInstanceForType() { return com.google.cloud.batch.v1.DeleteJobRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.batch.v1.DeleteJobRequest build() { com.google.cloud.batch.v1.DeleteJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.batch.v1.DeleteJobRequest buildPartial() { com.google.cloud.batch.v1.DeleteJobRequest result = new com.google.cloud.batch.v1.DeleteJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.batch.v1.DeleteJobRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.reason_ = reason_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.requestId_ = requestId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if 
(other instanceof com.google.cloud.batch.v1.DeleteJobRequest) { return mergeFrom((com.google.cloud.batch.v1.DeleteJobRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.batch.v1.DeleteJobRequest other) { if (other == com.google.cloud.batch.v1.DeleteJobRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getReason().isEmpty()) { reason_ = other.reason_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { reason_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The name. 
*/ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object reason_ = ""; /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The reason. 
*/ public java.lang.String getReason() { java.lang.Object ref = reason_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); reason_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for reason. */ public com.google.protobuf.ByteString getReasonBytes() { java.lang.Object ref = reason_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); reason_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The reason to set. * @return This builder for chaining. */ public Builder setReason(java.lang.String value) { if (value == null) { throw new NullPointerException(); } reason_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearReason() { reason_ = getDefaultInstance().getReason(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for reason to set. * @return This builder for chaining. 
*/ public Builder setReasonBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); reason_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. 
If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. 
*/ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. 
* * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.batch.v1.DeleteJobRequest) } // @@protoc_insertion_point(class_scope:google.cloud.batch.v1.DeleteJobRequest) private static final com.google.cloud.batch.v1.DeleteJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.batch.v1.DeleteJobRequest(); } public static com.google.cloud.batch.v1.DeleteJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeleteJobRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteJobRequest>() { @java.lang.Override public DeleteJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeleteJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeleteJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.batch.v1.DeleteJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,214
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/ReservationBlockPhysicalTopologyInstance.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * The instances information for a given Block * </pre> * * Protobuf type {@code google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance} */ public final class ReservationBlockPhysicalTopologyInstance extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance) ReservationBlockPhysicalTopologyInstanceOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationBlockPhysicalTopologyInstance.newBuilder() to construct.
// NOTE(review): protoc-generated message class — never hand-edit; any change must be made in
// google/cloud/compute/v1/compute.proto and the Java regenerated. The unusually large field
// numbers (45488389, 390842814, 177513473) are how the Compute API proto assigns its tags.
// Three optional fields, with explicit presence tracked in bitField0_:
//   0x00000001 -> instance_id (uint64), 0x00000002 -> physical_host_topology (message),
//   0x00000004 -> project_id (uint64).
// The raw switch tags in Builder.mergeFrom below are (field_number << 3) | wire_type;
// -1168224782 is 390842814's tag overflowing a signed 32-bit int — expected, not a bug.
private ReservationBlockPhysicalTopologyInstance( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ReservationBlockPhysicalTopologyInstance() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ReservationBlockPhysicalTopologyInstance(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ReservationBlockPhysicalTopologyInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ReservationBlockPhysicalTopologyInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance.class, com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance.Builder.class); } private int bitField0_; public static final int INSTANCE_ID_FIELD_NUMBER = 45488389; private long instanceId_ = 0L; /** * * * <pre> * The InstanceId of the instance * </pre> * * <code>optional uint64 instance_id = 45488389;</code> * * @return Whether the instanceId field is set. */ @java.lang.Override public boolean hasInstanceId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The InstanceId of the instance * </pre> * * <code>optional uint64 instance_id = 45488389;</code> * * @return The instanceId. */ @java.lang.Override public long getInstanceId() { return instanceId_; } public static final int PHYSICAL_HOST_TOPOLOGY_FIELD_NUMBER = 390842814; private com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physicalHostTopology_; /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource.
 * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> * * @return Whether the physicalHostTopology field is set. */ @java.lang.Override public boolean hasPhysicalHostTopology() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> * * @return The physicalHostTopology. */ @java.lang.Override public com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology getPhysicalHostTopology() { return physicalHostTopology_ == null ? com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology .getDefaultInstance() : physicalHostTopology_; } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ @java.lang.Override public com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopologyOrBuilder getPhysicalHostTopologyOrBuilder() { return physicalHostTopology_ == null ? com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology .getDefaultInstance() : physicalHostTopology_; } public static final int PROJECT_ID_FIELD_NUMBER = 177513473; private long projectId_ = 0L; /** * * * <pre> * Project where the instance lives * </pre> * * <code>optional uint64 project_id = 177513473;</code> * * @return Whether the projectId field is set.
 */ @java.lang.Override public boolean hasProjectId() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Project where the instance lives * </pre> * * <code>optional uint64 project_id = 177513473;</code> * * @return The projectId. */ @java.lang.Override public long getProjectId() { return projectId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeUInt64(45488389, instanceId_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeUInt64(177513473, projectId_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(390842814, getPhysicalHostTopology()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeUInt64Size(45488389, instanceId_); } if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream.computeUInt64Size(177513473, projectId_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 390842814, getPhysicalHostTopology()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance)) { return super.equals(obj); } com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance other = (com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance) obj; if
 (hasInstanceId() != other.hasInstanceId()) return false; if (hasInstanceId()) { if (getInstanceId() != other.getInstanceId()) return false; } if (hasPhysicalHostTopology() != other.hasPhysicalHostTopology()) return false; if (hasPhysicalHostTopology()) { if (!getPhysicalHostTopology().equals(other.getPhysicalHostTopology())) return false; } if (hasProjectId() != other.hasProjectId()) return false; if (hasProjectId()) { if (getProjectId() != other.getProjectId()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasInstanceId()) { hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getInstanceId()); } if (hasPhysicalHostTopology()) { hash = (37 * hash) + PHYSICAL_HOST_TOPOLOGY_FIELD_NUMBER; hash = (53 * hash) + getPhysicalHostTopology().hashCode(); } if (hasProjectId()) { hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getProjectId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return
 PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static
 com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The instances information for a given Block * </pre> * * Protobuf type {@code google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance) com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstanceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ReservationBlockPhysicalTopologyInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ReservationBlockPhysicalTopologyInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance.class, com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance.Builder.class); } // Construct using // com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getPhysicalHostTopologyFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; instanceId_ = 0L; physicalHostTopology_ = null; if (physicalHostTopologyBuilder_ != null) { physicalHostTopologyBuilder_.dispose(); physicalHostTopologyBuilder_ = null; } projectId_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ReservationBlockPhysicalTopologyInstance_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance getDefaultInstanceForType() { return com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance .getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance build() { com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance buildPartial() { // assembles the message from builder state; presence bits are copied in buildPartial0
 com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance result = new com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.instanceId_ = instanceId_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.physicalHostTopology_ = physicalHostTopologyBuilder_ == null ? physicalHostTopology_ : physicalHostTopologyBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.projectId_ = projectId_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance) { return mergeFrom(
 (com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance other) { if (other == com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance .getDefaultInstance()) return this; if (other.hasInstanceId()) { setInstanceId(other.getInstanceId()); } if (other.hasPhysicalHostTopology()) { mergePhysicalHostTopology(other.getPhysicalHostTopology()); } if (other.hasProjectId()) { setProjectId(other.getProjectId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 363907112: { instanceId_ = input.readUInt64(); bitField0_ |= 0x00000001; break; } // case 363907112 case 1420107784: { projectId_ = input.readUInt64(); bitField0_ |= 0x00000004; break; } // case 1420107784 case -1168224782: { input.readMessage( getPhysicalHostTopologyFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case -1168224782 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long instanceId_; /** * * * <pre> * The InstanceId of the instance * </pre> * * <code>optional uint64 instance_id = 45488389;</code> * * @return Whether the
 instanceId field is set. */ @java.lang.Override public boolean hasInstanceId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The InstanceId of the instance * </pre> * * <code>optional uint64 instance_id = 45488389;</code> * * @return The instanceId. */ @java.lang.Override public long getInstanceId() { return instanceId_; } /** * * * <pre> * The InstanceId of the instance * </pre> * * <code>optional uint64 instance_id = 45488389;</code> * * @param value The instanceId to set. * @return This builder for chaining. */ public Builder setInstanceId(long value) { instanceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The InstanceId of the instance * </pre> * * <code>optional uint64 instance_id = 45488389;</code> * * @return This builder for chaining. */ public Builder clearInstanceId() { bitField0_ = (bitField0_ & ~0x00000001); instanceId_ = 0L; onChanged(); return this; } private com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physicalHostTopology_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopology, com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology .Builder, com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopologyOrBuilder> physicalHostTopologyBuilder_; /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> * * @return Whether the physicalHostTopology field is set. */ public boolean hasPhysicalHostTopology() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource.
 * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> * * @return The physicalHostTopology. */ public com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology getPhysicalHostTopology() { if (physicalHostTopologyBuilder_ == null) { return physicalHostTopology_ == null ? com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopology.getDefaultInstance() : physicalHostTopology_; } else { return physicalHostTopologyBuilder_.getMessage(); } } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ public Builder setPhysicalHostTopology( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology value) { if (physicalHostTopologyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } physicalHostTopology_ = value; } else { physicalHostTopologyBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ public Builder setPhysicalHostTopology( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology .Builder builderForValue) { if (physicalHostTopologyBuilder_ == null) { physicalHostTopology_ = builderForValue.build(); } else { physicalHostTopologyBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource.
 * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ public Builder mergePhysicalHostTopology( com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology value) { if (physicalHostTopologyBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && physicalHostTopology_ != null && physicalHostTopology_ != com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopology .getDefaultInstance()) { getPhysicalHostTopologyBuilder().mergeFrom(value); } else { physicalHostTopology_ = value; } } else { physicalHostTopologyBuilder_.mergeFrom(value); } if (physicalHostTopology_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ public Builder clearPhysicalHostTopology() { bitField0_ = (bitField0_ & ~0x00000002); physicalHostTopology_ = null; if (physicalHostTopologyBuilder_ != null) { physicalHostTopologyBuilder_.dispose(); physicalHostTopologyBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ public com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology .Builder getPhysicalHostTopologyBuilder() { bitField0_ |= 0x00000002; onChanged(); return getPhysicalHostTopologyFieldBuilder().getBuilder(); } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource.
 * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ public com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopologyOrBuilder getPhysicalHostTopologyOrBuilder() { if (physicalHostTopologyBuilder_ != null) { return physicalHostTopologyBuilder_.getMessageOrBuilder(); } else { return physicalHostTopology_ == null ? com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopology.getDefaultInstance() : physicalHostTopology_; } } /** * * * <pre> * The PhysicalHostTopology of instances within a Block resource. * </pre> * * <code> * optional .google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology physical_host_topology = 390842814; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopology, com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstancePhysicalHostTopology .Builder, com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopologyOrBuilder> getPhysicalHostTopologyFieldBuilder() { if (physicalHostTopologyBuilder_ == null) { physicalHostTopologyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopology, com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopology.Builder, com.google.cloud.compute.v1 .ReservationBlockPhysicalTopologyInstancePhysicalHostTopologyOrBuilder>( getPhysicalHostTopology(), getParentForChildren(), isClean()); physicalHostTopology_ = null; } return physicalHostTopologyBuilder_; } private long projectId_; /** * * * <pre> * Project where the instance lives * </pre> * * <code>optional uint64 project_id = 177513473;</code> * * @return Whether the projectId field is set.
 */ @java.lang.Override public boolean hasProjectId() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Project where the instance lives * </pre> * * <code>optional uint64 project_id = 177513473;</code> * * @return The projectId. */ @java.lang.Override public long getProjectId() { return projectId_; } /** * * * <pre> * Project where the instance lives * </pre> * * <code>optional uint64 project_id = 177513473;</code> * * @param value The projectId to set. * @return This builder for chaining. */ public Builder setProjectId(long value) { projectId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Project where the instance lives * </pre> * * <code>optional uint64 project_id = 177513473;</code> * * @return This builder for chaining. */ public Builder clearProjectId() { bitField0_ = (bitField0_ & ~0x00000004); projectId_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance) private static final com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance(); } public static com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ReservationBlockPhysicalTopologyInstance> PARSER = new com.google.protobuf.AbstractParser<ReservationBlockPhysicalTopologyInstance>() { // parser delegates to Builder.mergeFrom and surfaces partial messages on failure
 @java.lang.Override public ReservationBlockPhysicalTopologyInstance parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ReservationBlockPhysicalTopologyInstance> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ReservationBlockPhysicalTopologyInstance> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.ReservationBlockPhysicalTopologyInstance getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,230
java-gkehub/proto-google-cloud-gkehub-v1/src/main/java/com/google/cloud/gkehub/configmanagement/v1/HierarchyControllerState.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/gkehub/v1/configmanagement/configmanagement.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.gkehub.configmanagement.v1; /** * * * <pre> * State for Hierarchy Controller * </pre> * * Protobuf type {@code google.cloud.gkehub.configmanagement.v1.HierarchyControllerState} */ public final class HierarchyControllerState extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.gkehub.configmanagement.v1.HierarchyControllerState) HierarchyControllerStateOrBuilder { private static final long serialVersionUID = 0L; // Use HierarchyControllerState.newBuilder() to construct. 
private HierarchyControllerState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private HierarchyControllerState() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new HierarchyControllerState(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState.class, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState.Builder.class); } private int bitField0_; public static final int VERSION_FIELD_NUMBER = 1; private com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version_; /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> * * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> * * @return The version. */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion getVersion() { return version_ == null ? 
com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion .getDefaultInstance() : version_; } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersionOrBuilder getVersionOrBuilder() { return version_ == null ? com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion .getDefaultInstance() : version_; } public static final int STATE_FIELD_NUMBER = 2; private com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state_; /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> * * @return Whether the state field is set. */ @java.lang.Override public boolean hasState() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> * * @return The state. */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState getState() { return state_ == null ? com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState .getDefaultInstance() : state_; } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentStateOrBuilder getStateOrBuilder() { return state_ == null ? 
com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState .getDefaultInstance() : state_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getVersion()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getState()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getVersion()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getState()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState)) { return super.equals(obj); } com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState other = (com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState) obj; if (hasVersion() != other.hasVersion()) return false; if (hasVersion()) { if (!getVersion().equals(other.getVersion())) return false; } if (hasState() != other.hasState()) return false; if (hasState()) { if (!getState().equals(other.getState())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + getState().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( java.io.InputStream input) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * State for Hierarchy Controller * </pre> * * Protobuf type {@code google.cloud.gkehub.configmanagement.v1.HierarchyControllerState} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.configmanagement.v1.HierarchyControllerState) com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState.class, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState.Builder.class); } // Construct using // com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getVersionFieldBuilder(); getStateFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; version_ = null; if (versionBuilder_ 
!= null) { versionBuilder_.dispose(); versionBuilder_ = null; } state_ = null; if (stateBuilder_ != null) { stateBuilder_.dispose(); stateBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1_HierarchyControllerState_descriptor; } @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState getDefaultInstanceForType() { return com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState .getDefaultInstance(); } @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState build() { com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState buildPartial() { com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState result = new com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.version_ = versionBuilder_ == null ? version_ : versionBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.state_ = stateBuilder_ == null ? 
state_ : stateBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState) { return mergeFrom( (com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState other) { if (other == com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState .getDefaultInstance()) return this; if (other.hasVersion()) { mergeVersion(other.getVersion()); } if (other.hasState()) { mergeState(other.getState()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getVersionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getStateFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion.Builder, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersionOrBuilder> versionBuilder_; /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> * * @return Whether the version field is set. */ public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> * * @return The version. */ public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion getVersion() { if (versionBuilder_ == null) { return version_ == null ? 
com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion .getDefaultInstance() : version_; } else { return versionBuilder_.getMessage(); } } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ public Builder setVersion( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion value) { if (versionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } version_ = value; } else { versionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ public Builder setVersion( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion.Builder builderForValue) { if (versionBuilder_ == null) { version_ = builderForValue.build(); } else { versionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ public Builder mergeVersion( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion value) { if (versionBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && version_ != null && version_ != com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion .getDefaultInstance()) { getVersionBuilder().mergeFrom(value); } else { version_ = value; } } else { versionBuilder_.mergeFrom(value); } if (version_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ public Builder clearVersion() { bitField0_ = (bitField0_ & 
~0x00000001); version_ = null; if (versionBuilder_ != null) { versionBuilder_.dispose(); versionBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion.Builder getVersionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getVersionFieldBuilder().getBuilder(); } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersionOrBuilder getVersionOrBuilder() { if (versionBuilder_ != null) { return versionBuilder_.getMessageOrBuilder(); } else { return version_ == null ? com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion .getDefaultInstance() : version_; } } /** * * * <pre> * The version for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion version = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion.Builder, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersionOrBuilder> getVersionFieldBuilder() { if (versionBuilder_ == null) { versionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersion.Builder, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerVersionOrBuilder>( getVersion(), getParentForChildren(), isClean()); version_ = null; } return versionBuilder_; } private com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state_; 
private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState.Builder, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentStateOrBuilder> stateBuilder_; /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> * * @return Whether the state field is set. */ public boolean hasState() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> * * @return The state. */ public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState getState() { if (stateBuilder_ == null) { return state_ == null ? com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState .getDefaultInstance() : state_; } else { return stateBuilder_.getMessage(); } } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ public Builder setState( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState value) { if (stateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } state_ = value; } else { stateBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ public Builder setState( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState.Builder builderForValue) { if (stateBuilder_ == null) { state_ = 
builderForValue.build(); } else { stateBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ public Builder mergeState( com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState value) { if (stateBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && state_ != null && state_ != com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState .getDefaultInstance()) { getStateBuilder().mergeFrom(value); } else { state_ = value; } } else { stateBuilder_.mergeFrom(value); } if (state_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000002); state_ = null; if (stateBuilder_ != null) { stateBuilder_.dispose(); stateBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState.Builder getStateBuilder() { bitField0_ |= 0x00000002; onChanged(); return getStateFieldBuilder().getBuilder(); } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentStateOrBuilder getStateOrBuilder() { if (stateBuilder_ != null) { return stateBuilder_.getMessageOrBuilder(); } else { return state_ == null ? 
com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState .getDefaultInstance() : state_; } } /** * * * <pre> * The deployment state for Hierarchy Controller * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState state = 2; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState.Builder, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentStateOrBuilder> getStateFieldBuilder() { if (stateBuilder_ == null) { stateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState, com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerDeploymentState .Builder, com.google.cloud.gkehub.configmanagement.v1 .HierarchyControllerDeploymentStateOrBuilder>( getState(), getParentForChildren(), isClean()); state_ = null; } return stateBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.gkehub.configmanagement.v1.HierarchyControllerState) } // @@protoc_insertion_point(class_scope:google.cloud.gkehub.configmanagement.v1.HierarchyControllerState) private static final com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState(); } public static com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<HierarchyControllerState> PARSER = new com.google.protobuf.AbstractParser<HierarchyControllerState>() { @java.lang.Override public HierarchyControllerState parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<HierarchyControllerState> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<HierarchyControllerState> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1.HierarchyControllerState getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,444
java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/stub/VersionsStubSettings.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.dialogflow.cx.v3beta1.stub;

import static com.google.cloud.dialogflow.cx.v3beta1.VersionsClient.ListLocationsPagedResponse;
import static com.google.cloud.dialogflow.cx.v3beta1.VersionsClient.ListVersionsPagedResponse;

import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsRequest;
import com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse;
import com.google.cloud.dialogflow.cx.v3beta1.CreateVersionOperationMetadata;
import com.google.cloud.dialogflow.cx.v3beta1.CreateVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.DeleteVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.GetVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListVersionsRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListVersionsResponse;
import com.google.cloud.dialogflow.cx.v3beta1.LoadVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.UpdateVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.Version;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.Struct;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Settings class to configure an instance of {@link VersionsStub}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (dialogflow.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the
 * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
 * of getVersion:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * VersionsStubSettings.Builder versionsSettingsBuilder = VersionsStubSettings.newBuilder();
 * versionsSettingsBuilder
 *     .getVersionSettings()
 *     .setRetrySettings(
 *         versionsSettingsBuilder
 *             .getVersionSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofSeconds(1))
 *             .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
 *             .setMaxAttempts(5)
 *             .setMaxRetryDelayDuration(Duration.ofSeconds(30))
 *             .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
 *             .setRetryDelayMultiplier(1.3)
 *             .setRpcTimeoutMultiplier(1.5)
 *             .setTotalTimeoutDuration(Duration.ofSeconds(300))
 *             .build());
 * VersionsStubSettings versionsSettings = versionsSettingsBuilder.build();
 * }</pre>
 *
 * Please refer to the [Client Side Retry
 * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
 * additional support in setting retries.
 *
 * <p>To configure the RetrySettings of a Long Running Operation method, create an
 * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
 * configure the RetrySettings for createVersion:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * VersionsStubSettings.Builder versionsSettingsBuilder = VersionsStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * versionsSettingsBuilder
 *     .createVersionOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
 * }</pre>
 */
@BetaApi
@Generated("by gapic-generator-java")
public class VersionsStubSettings extends StubSettings<VersionsStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/dialogflow")
          .build();

  // One immutable settings object per RPC; each is built from the corresponding Builder field in
  // the VersionsStubSettings(Builder) constructor below.
  private final PagedCallSettings<
          ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
      listVersionsSettings;
  private final UnaryCallSettings<GetVersionRequest, Version> getVersionSettings;
  private final UnaryCallSettings<CreateVersionRequest, Operation> createVersionSettings;
  private final OperationCallSettings<CreateVersionRequest, Version, CreateVersionOperationMetadata>
      createVersionOperationSettings;
  private final UnaryCallSettings<UpdateVersionRequest, Version> updateVersionSettings;
  private final UnaryCallSettings<DeleteVersionRequest, Empty> deleteVersionSettings;
  private final UnaryCallSettings<LoadVersionRequest, Operation> loadVersionSettings;
  private final OperationCallSettings<LoadVersionRequest, Empty, Struct>
      loadVersionOperationSettings;
  private final UnaryCallSettings<CompareVersionsRequest, CompareVersionsResponse>
      compareVersionsSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;

  // Describes how the paged listVersions RPC threads page tokens/sizes through requests and
  // extracts resources from responses.
  private static final PagedListDescriptor<ListVersionsRequest, ListVersionsResponse, Version>
      LIST_VERSIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListVersionsRequest, ListVersionsResponse, Version>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListVersionsRequest injectToken(ListVersionsRequest payload, String token) {
              return ListVersionsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListVersionsRequest injectPageSize(ListVersionsRequest payload, int pageSize) {
              return ListVersionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListVersionsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListVersionsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Version> extractResources(ListVersionsResponse payload) {
              return payload.getVersionsList();
            }
          };

  // Same pagination plumbing for the mixed-in Locations API.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };

  // Wraps the raw listVersions future into the client's ListVersionsPagedResponse.
  private static final PagedListResponseFactory<
          ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
      LIST_VERSIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>() {
            @Override
            public ApiFuture<ListVersionsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListVersionsRequest, ListVersionsResponse> callable,
                ListVersionsRequest request,
                ApiCallContext context,
                ApiFuture<ListVersionsResponse> futureResponse) {
              PageContext<ListVersionsRequest, ListVersionsResponse, Version> pageContext =
                  PageContext.create(callable, LIST_VERSIONS_PAGE_STR_DESC, request, context);
              return ListVersionsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  // Wraps the raw listLocations future into the client's ListLocationsPagedResponse.
  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to listVersions. */
  public PagedCallSettings<ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
      listVersionsSettings() {
    return listVersionsSettings;
  }

  /** Returns the object with the settings used for calls to getVersion. */
  public UnaryCallSettings<GetVersionRequest, Version> getVersionSettings() {
    return getVersionSettings;
  }

  /** Returns the object with the settings used for calls to createVersion. */
  public UnaryCallSettings<CreateVersionRequest, Operation> createVersionSettings() {
    return createVersionSettings;
  }

  /** Returns the object with the settings used for calls to createVersion. */
  public OperationCallSettings<CreateVersionRequest, Version, CreateVersionOperationMetadata>
      createVersionOperationSettings() {
    return createVersionOperationSettings;
  }

  /** Returns the object with the settings used for calls to updateVersion. */
  public UnaryCallSettings<UpdateVersionRequest, Version> updateVersionSettings() {
    return updateVersionSettings;
  }

  /** Returns the object with the settings used for calls to deleteVersion. */
  public UnaryCallSettings<DeleteVersionRequest, Empty> deleteVersionSettings() {
    return deleteVersionSettings;
  }

  /** Returns the object with the settings used for calls to loadVersion. */
  public UnaryCallSettings<LoadVersionRequest, Operation> loadVersionSettings() {
    return loadVersionSettings;
  }

  /** Returns the object with the settings used for calls to loadVersion. */
  public OperationCallSettings<LoadVersionRequest, Empty, Struct> loadVersionOperationSettings() {
    return loadVersionOperationSettings;
  }

  /** Returns the object with the settings used for calls to compareVersions. */
  public UnaryCallSettings<CompareVersionsRequest, CompareVersionsResponse>
      compareVersionsSettings() {
    return compareVersionsSettings;
  }

  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }

  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }

  /**
   * Creates the transport-specific stub (gRPC or REST/JSON) matching the configured
   * TransportChannelProvider; throws for any other transport name.
   */
  public VersionsStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcVersionsStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonVersionsStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "dialogflow";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "dialogflow.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dialogflow.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  // gRPC is the default transport when the caller does not choose one explicitly.
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(VersionsStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(VersionsStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return VersionsStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected VersionsStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    listVersionsSettings = settingsBuilder.listVersionsSettings().build();
    getVersionSettings = settingsBuilder.getVersionSettings().build();
    createVersionSettings = settingsBuilder.createVersionSettings().build();
    createVersionOperationSettings = settingsBuilder.createVersionOperationSettings().build();
    updateVersionSettings = settingsBuilder.updateVersionSettings().build();
    deleteVersionSettings = settingsBuilder.deleteVersionSettings().build();
    loadVersionSettings = settingsBuilder.loadVersionSettings().build();
    loadVersionOperationSettings = settingsBuilder.loadVersionOperationSettings().build();
    compareVersionsSettings = settingsBuilder.compareVersionsSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }

  /** Builder for VersionsStubSettings. */
  public static class Builder extends StubSettings.Builder<VersionsStubSettings, Builder> {
    // All unary/paged settings builders, kept in one list so applyToAllUnaryMethods can update
    // them uniformly. Operation (LRO) settings builders are intentionally not in this list.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
        listVersionsSettings;
    private final UnaryCallSettings.Builder<GetVersionRequest, Version> getVersionSettings;
    private final UnaryCallSettings.Builder<CreateVersionRequest, Operation> createVersionSettings;
    private final OperationCallSettings.Builder<
            CreateVersionRequest, Version, CreateVersionOperationMetadata>
        createVersionOperationSettings;
    private final UnaryCallSettings.Builder<UpdateVersionRequest, Version> updateVersionSettings;
    private final UnaryCallSettings.Builder<DeleteVersionRequest, Empty> deleteVersionSettings;
    private final UnaryCallSettings.Builder<LoadVersionRequest, Operation> loadVersionSettings;
    private final OperationCallSettings.Builder<LoadVersionRequest, Empty, Struct>
        loadVersionOperationSettings;
    private final UnaryCallSettings.Builder<CompareVersionsRequest, CompareVersionsResponse>
        compareVersionsSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;

    // Named retryable-code sets referenced by initDefaults; only UNAVAILABLE is retried.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry parameter sets referenced by initDefaults.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      listVersionsSettings = PagedCallSettings.newBuilder(LIST_VERSIONS_PAGE_STR_FACT);
      getVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createVersionOperationSettings = OperationCallSettings.newBuilder();
      updateVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      loadVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      loadVersionOperationSettings = OperationCallSettings.newBuilder();
      compareVersionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listVersionsSettings,
              getVersionSettings,
              createVersionSettings,
              updateVersionSettings,
              deleteVersionSettings,
              loadVersionSettings,
              compareVersionsSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }

    protected Builder(VersionsStubSettings settings) {
      super(settings);

      listVersionsSettings = settings.listVersionsSettings.toBuilder();
      getVersionSettings = settings.getVersionSettings.toBuilder();
      createVersionSettings = settings.createVersionSettings.toBuilder();
      createVersionOperationSettings = settings.createVersionOperationSettings.toBuilder();
      updateVersionSettings = settings.updateVersionSettings.toBuilder();
      deleteVersionSettings = settings.deleteVersionSettings.toBuilder();
      loadVersionSettings = settings.loadVersionSettings.toBuilder();
      loadVersionOperationSettings = settings.loadVersionOperationSettings.toBuilder();
      compareVersionsSettings = settings.compareVersionsSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listVersionsSettings,
              getVersionSettings,
              createVersionSettings,
              updateVersionSettings,
              deleteVersionSettings,
              loadVersionSettings,
              compareVersionsSettings,
              listLocationsSettings,
              getLocationSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the service-config retry/timeout defaults to every RPC, plus LRO polling settings
    // for the two long-running methods (createVersion, loadVersion).
    private static Builder initDefaults(Builder builder) {
      builder
          .listVersionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .updateVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deleteVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .loadVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .compareVersionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createVersionOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<CreateVersionRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Version.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(
                  CreateVersionOperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      builder
          .loadVersionOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings.<LoadVersionRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Struct.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to listVersions. */
    public PagedCallSettings.Builder<
            ListVersionsRequest, ListVersionsResponse, ListVersionsPagedResponse>
        listVersionsSettings() {
      return listVersionsSettings;
    }

    /** Returns the builder for the settings used for calls to getVersion. */
    public UnaryCallSettings.Builder<GetVersionRequest, Version> getVersionSettings() {
      return getVersionSettings;
    }

    /** Returns the builder for the settings used for calls to createVersion. */
    public UnaryCallSettings.Builder<CreateVersionRequest, Operation> createVersionSettings() {
      return createVersionSettings;
    }

    /** Returns the builder for the settings used for calls to createVersion. */
    public OperationCallSettings.Builder<
            CreateVersionRequest, Version, CreateVersionOperationMetadata>
        createVersionOperationSettings() {
      return createVersionOperationSettings;
    }

    /** Returns the builder for the settings used for calls to updateVersion. */
    public UnaryCallSettings.Builder<UpdateVersionRequest, Version> updateVersionSettings() {
      return updateVersionSettings;
    }

    /** Returns the builder for the settings used for calls to deleteVersion. */
    public UnaryCallSettings.Builder<DeleteVersionRequest, Empty> deleteVersionSettings() {
      return deleteVersionSettings;
    }

    /** Returns the builder for the settings used for calls to loadVersion. */
    public UnaryCallSettings.Builder<LoadVersionRequest, Operation> loadVersionSettings() {
      return loadVersionSettings;
    }

    /** Returns the builder for the settings used for calls to loadVersion. */
    public OperationCallSettings.Builder<LoadVersionRequest, Empty, Struct>
        loadVersionOperationSettings() {
      return loadVersionOperationSettings;
    }

    /** Returns the builder for the settings used for calls to compareVersions. */
    public UnaryCallSettings.Builder<CompareVersionsRequest, CompareVersionsResponse>
        compareVersionsSettings() {
      return compareVersionsSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    @Override
    public VersionsStubSettings build() throws IOException {
      return new VersionsStubSettings(this);
    }
  }
}
googleapis/google-cloud-java
35,227
java-shopping-merchant-inventories/proto-google-shopping-merchant-inventories-v1/src/main/java/com/google/shopping/merchant/inventories/v1/InsertLocalInventoryRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/inventories/v1/localinventory.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.inventories.v1; /** * * * <pre> * Request message for the `InsertLocalInventory` method. * </pre> * * Protobuf type {@code google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest} */ public final class InsertLocalInventoryRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest) InsertLocalInventoryRequestOrBuilder { private static final long serialVersionUID = 0L; // Use InsertLocalInventoryRequest.newBuilder() to construct. 
private InsertLocalInventoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InsertLocalInventoryRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InsertLocalInventoryRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.inventories.v1.LocalInventoryProto .internal_static_google_shopping_merchant_inventories_v1_InsertLocalInventoryRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.inventories.v1.LocalInventoryProto .internal_static_google_shopping_merchant_inventories_v1_InsertLocalInventoryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest.class, com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The account and product where this inventory will be inserted. * Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The account and product where this inventory will be inserted. 
* Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LOCAL_INVENTORY_FIELD_NUMBER = 2; private com.google.shopping.merchant.inventories.v1.LocalInventory localInventory_; /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the localInventory field is set. */ @java.lang.Override public boolean hasLocalInventory() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The localInventory. */ @java.lang.Override public com.google.shopping.merchant.inventories.v1.LocalInventory getLocalInventory() { return localInventory_ == null ? com.google.shopping.merchant.inventories.v1.LocalInventory.getDefaultInstance() : localInventory_; } /** * * * <pre> * Required. Local inventory information of the product. 
If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.shopping.merchant.inventories.v1.LocalInventoryOrBuilder getLocalInventoryOrBuilder() { return localInventory_ == null ? com.google.shopping.merchant.inventories.v1.LocalInventory.getDefaultInstance() : localInventory_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getLocalInventory()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getLocalInventory()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest)) { return super.equals(obj); } com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest other 
= (com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasLocalInventory() != other.hasLocalInventory()) return false; if (hasLocalInventory()) { if (!getLocalInventory().equals(other.getLocalInventory())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasLocalInventory()) { hash = (37 * hash) + LOCAL_INVENTORY_FIELD_NUMBER; hash = (53 * hash) + getLocalInventory().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `InsertLocalInventory` method. * </pre> * * Protobuf type {@code google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest) com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.inventories.v1.LocalInventoryProto .internal_static_google_shopping_merchant_inventories_v1_InsertLocalInventoryRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.inventories.v1.LocalInventoryProto .internal_static_google_shopping_merchant_inventories_v1_InsertLocalInventoryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest.class, com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest.Builder .class); } // Construct using // com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getLocalInventoryFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; localInventory_ = null; if (localInventoryBuilder_ != null) { localInventoryBuilder_.dispose(); localInventoryBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.inventories.v1.LocalInventoryProto .internal_static_google_shopping_merchant_inventories_v1_InsertLocalInventoryRequest_descriptor; } @java.lang.Override public com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest getDefaultInstanceForType() { return com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest .getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest build() { com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest buildPartial() { com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest result = new com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); 
return result; } private void buildPartial0( com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.localInventory_ = localInventoryBuilder_ == null ? localInventory_ : localInventoryBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest) { return mergeFrom( (com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest other) { if (other == com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 
0x00000001; onChanged(); } if (other.hasLocalInventory()) { mergeLocalInventory(other.getLocalInventory()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getLocalInventoryFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The account and product where this inventory will be inserted. * Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The account and product where this inventory will be inserted. 
* Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The account and product where this inventory will be inserted. * Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The account and product where this inventory will be inserted. * Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The account and product where this inventory will be inserted. * Format: `accounts/{account}/products/{product}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.shopping.merchant.inventories.v1.LocalInventory localInventory_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.inventories.v1.LocalInventory, com.google.shopping.merchant.inventories.v1.LocalInventory.Builder, com.google.shopping.merchant.inventories.v1.LocalInventoryOrBuilder> localInventoryBuilder_; /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the localInventory field is set. */ public boolean hasLocalInventory() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The localInventory. */ public com.google.shopping.merchant.inventories.v1.LocalInventory getLocalInventory() { if (localInventoryBuilder_ == null) { return localInventory_ == null ? com.google.shopping.merchant.inventories.v1.LocalInventory.getDefaultInstance() : localInventory_; } else { return localInventoryBuilder_.getMessage(); } } /** * * * <pre> * Required. Local inventory information of the product. 
If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setLocalInventory( com.google.shopping.merchant.inventories.v1.LocalInventory value) { if (localInventoryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } localInventory_ = value; } else { localInventoryBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setLocalInventory( com.google.shopping.merchant.inventories.v1.LocalInventory.Builder builderForValue) { if (localInventoryBuilder_ == null) { localInventory_ = builderForValue.build(); } else { localInventoryBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. 
* </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeLocalInventory( com.google.shopping.merchant.inventories.v1.LocalInventory value) { if (localInventoryBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && localInventory_ != null && localInventory_ != com.google.shopping.merchant.inventories.v1.LocalInventory .getDefaultInstance()) { getLocalInventoryBuilder().mergeFrom(value); } else { localInventory_ = value; } } else { localInventoryBuilder_.mergeFrom(value); } if (localInventory_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearLocalInventory() { bitField0_ = (bitField0_ & ~0x00000002); localInventory_ = null; if (localInventoryBuilder_ != null) { localInventoryBuilder_.dispose(); localInventoryBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.inventories.v1.LocalInventory.Builder getLocalInventoryBuilder() { bitField0_ |= 0x00000002; onChanged(); return getLocalInventoryFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Local inventory information of the product. 
If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.inventories.v1.LocalInventoryOrBuilder getLocalInventoryOrBuilder() { if (localInventoryBuilder_ != null) { return localInventoryBuilder_.getMessageOrBuilder(); } else { return localInventory_ == null ? com.google.shopping.merchant.inventories.v1.LocalInventory.getDefaultInstance() : localInventory_; } } /** * * * <pre> * Required. Local inventory information of the product. If the product * already has a `LocalInventory` resource for the same `storeCode`, full * replacement of the `LocalInventory` resource is performed. * </pre> * * <code> * .google.shopping.merchant.inventories.v1.LocalInventory local_inventory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.inventories.v1.LocalInventory, com.google.shopping.merchant.inventories.v1.LocalInventory.Builder, com.google.shopping.merchant.inventories.v1.LocalInventoryOrBuilder> getLocalInventoryFieldBuilder() { if (localInventoryBuilder_ == null) { localInventoryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.inventories.v1.LocalInventory, com.google.shopping.merchant.inventories.v1.LocalInventory.Builder, com.google.shopping.merchant.inventories.v1.LocalInventoryOrBuilder>( getLocalInventory(), getParentForChildren(), isClean()); localInventory_ = null; } return localInventoryBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet 
unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest) private static final com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest(); } public static com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InsertLocalInventoryRequest> PARSER = new com.google.protobuf.AbstractParser<InsertLocalInventoryRequest>() { @java.lang.Override public InsertLocalInventoryRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<InsertLocalInventoryRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<InsertLocalInventoryRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.inventories.v1.InsertLocalInventoryRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,434
java-dataplex/google-cloud-dataplex/src/main/java/com/google/cloud/dataplex/v1/stub/MetadataServiceStubSettings.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.dataplex.v1.stub;

import static com.google.cloud.dataplex.v1.MetadataServiceClient.ListEntitiesPagedResponse;
import static com.google.cloud.dataplex.v1.MetadataServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.dataplex.v1.MetadataServiceClient.ListPartitionsPagedResponse;

import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dataplex.v1.CreateEntityRequest;
import com.google.cloud.dataplex.v1.CreatePartitionRequest;
import com.google.cloud.dataplex.v1.DeleteEntityRequest;
import com.google.cloud.dataplex.v1.DeletePartitionRequest;
import com.google.cloud.dataplex.v1.Entity;
import com.google.cloud.dataplex.v1.GetEntityRequest;
import com.google.cloud.dataplex.v1.GetPartitionRequest;
import com.google.cloud.dataplex.v1.ListEntitiesRequest;
import com.google.cloud.dataplex.v1.ListEntitiesResponse;
import com.google.cloud.dataplex.v1.ListPartitionsRequest;
import com.google.cloud.dataplex.v1.ListPartitionsResponse;
import com.google.cloud.dataplex.v1.Partition;
import com.google.cloud.dataplex.v1.UpdateEntityRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
// NOTE(review): this file is emitted by gapic-generator-java (see @Generated below);
// do not hand-edit logic here — changes belong in the generator or the service config.
/**
 * Settings class to configure an instance of {@link MetadataServiceStub}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (dataplex.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the
 * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
 * of createEntity:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * MetadataServiceStubSettings.Builder metadataServiceSettingsBuilder =
 *     MetadataServiceStubSettings.newBuilder();
 * metadataServiceSettingsBuilder
 *     .createEntitySettings()
 *     .setRetrySettings(
 *         metadataServiceSettingsBuilder
 *             .createEntitySettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofSeconds(1))
 *             .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
 *             .setMaxAttempts(5)
 *             .setMaxRetryDelayDuration(Duration.ofSeconds(30))
 *             .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
 *             .setRetryDelayMultiplier(1.3)
 *             .setRpcTimeoutMultiplier(1.5)
 *             .setTotalTimeoutDuration(Duration.ofSeconds(300))
 *             .build());
 * MetadataServiceStubSettings metadataServiceSettings = metadataServiceSettingsBuilder.build();
 * }</pre>
 *
 * Please refer to the [Client Side Retry
 * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
 * additional support in setting retries.
 */
@Generated("by gapic-generator-java")
public class MetadataServiceStubSettings extends StubSettings<MetadataServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // One immutable per-RPC settings object for each method exposed by the stub.
  private final UnaryCallSettings<CreateEntityRequest, Entity> createEntitySettings;
  private final UnaryCallSettings<UpdateEntityRequest, Entity> updateEntitySettings;
  private final UnaryCallSettings<DeleteEntityRequest, Empty> deleteEntitySettings;
  private final UnaryCallSettings<GetEntityRequest, Entity> getEntitySettings;
  private final PagedCallSettings<
          ListEntitiesRequest, ListEntitiesResponse, ListEntitiesPagedResponse>
      listEntitiesSettings;
  private final UnaryCallSettings<CreatePartitionRequest, Partition> createPartitionSettings;
  private final UnaryCallSettings<DeletePartitionRequest, Empty> deletePartitionSettings;
  private final UnaryCallSettings<GetPartitionRequest, Partition> getPartitionSettings;
  private final PagedCallSettings<
          ListPartitionsRequest, ListPartitionsResponse, ListPartitionsPagedResponse>
      listPartitionsSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;

  // Page-streaming descriptors: adapt each List* request/response pair to the generic GAX
  // paging machinery (page-token injection/extraction, page-size plumbing, and extraction of
  // the repeated resource field from each response).
  private static final PagedListDescriptor<ListEntitiesRequest, ListEntitiesResponse, Entity>
      LIST_ENTITIES_PAGE_STR_DESC =
          new PagedListDescriptor<ListEntitiesRequest, ListEntitiesResponse, Entity>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListEntitiesRequest injectToken(ListEntitiesRequest payload, String token) {
              return ListEntitiesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListEntitiesRequest injectPageSize(ListEntitiesRequest payload, int pageSize) {
              return ListEntitiesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListEntitiesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListEntitiesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Entity> extractResources(ListEntitiesResponse payload) {
              return payload.getEntitiesList();
            }
          };

  private static final PagedListDescriptor<ListPartitionsRequest, ListPartitionsResponse, Partition>
      LIST_PARTITIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListPartitionsRequest, ListPartitionsResponse, Partition>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListPartitionsRequest injectToken(ListPartitionsRequest payload, String token) {
              return ListPartitionsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListPartitionsRequest injectPageSize(
                ListPartitionsRequest payload, int pageSize) {
              return ListPartitionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListPartitionsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListPartitionsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Partition> extractResources(ListPartitionsResponse payload) {
              return payload.getPartitionsList();
            }
          };

  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };

  // Factories that pair each paged RPC with its descriptor above, producing the
  // *PagedResponse futures returned to callers.
  private static final PagedListResponseFactory<
          ListEntitiesRequest, ListEntitiesResponse, ListEntitiesPagedResponse>
      LIST_ENTITIES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListEntitiesRequest, ListEntitiesResponse, ListEntitiesPagedResponse>() {
            @Override
            public ApiFuture<ListEntitiesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListEntitiesRequest, ListEntitiesResponse> callable,
                ListEntitiesRequest request,
                ApiCallContext context,
                ApiFuture<ListEntitiesResponse> futureResponse) {
              PageContext<ListEntitiesRequest, ListEntitiesResponse, Entity> pageContext =
                  PageContext.create(callable, LIST_ENTITIES_PAGE_STR_DESC, request, context);
              return ListEntitiesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListPartitionsRequest, ListPartitionsResponse, ListPartitionsPagedResponse>
      LIST_PARTITIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListPartitionsRequest, ListPartitionsResponse, ListPartitionsPagedResponse>() {
            @Override
            public ApiFuture<ListPartitionsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListPartitionsRequest, ListPartitionsResponse> callable,
                ListPartitionsRequest request,
                ApiCallContext context,
                ApiFuture<ListPartitionsResponse> futureResponse) {
              PageContext<ListPartitionsRequest, ListPartitionsResponse, Partition> pageContext =
                  PageContext.create(callable, LIST_PARTITIONS_PAGE_STR_DESC, request, context);
              return ListPartitionsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to createEntity. */
  public UnaryCallSettings<CreateEntityRequest, Entity> createEntitySettings() {
    return createEntitySettings;
  }

  /** Returns the object with the settings used for calls to updateEntity. */
  public UnaryCallSettings<UpdateEntityRequest, Entity> updateEntitySettings() {
    return updateEntitySettings;
  }

  /** Returns the object with the settings used for calls to deleteEntity. */
  public UnaryCallSettings<DeleteEntityRequest, Empty> deleteEntitySettings() {
    return deleteEntitySettings;
  }

  /** Returns the object with the settings used for calls to getEntity. */
  public UnaryCallSettings<GetEntityRequest, Entity> getEntitySettings() {
    return getEntitySettings;
  }

  /** Returns the object with the settings used for calls to listEntities. */
  public PagedCallSettings<ListEntitiesRequest, ListEntitiesResponse, ListEntitiesPagedResponse>
      listEntitiesSettings() {
    return listEntitiesSettings;
  }

  /** Returns the object with the settings used for calls to createPartition. */
  public UnaryCallSettings<CreatePartitionRequest, Partition> createPartitionSettings() {
    return createPartitionSettings;
  }

  /** Returns the object with the settings used for calls to deletePartition. */
  public UnaryCallSettings<DeletePartitionRequest, Empty> deletePartitionSettings() {
    return deletePartitionSettings;
  }

  /** Returns the object with the settings used for calls to getPartition. */
  public UnaryCallSettings<GetPartitionRequest, Partition> getPartitionSettings() {
    return getPartitionSettings;
  }

  /** Returns the object with the settings used for calls to listPartitions. */
  public PagedCallSettings<
          ListPartitionsRequest, ListPartitionsResponse, ListPartitionsPagedResponse>
      listPartitionsSettings() {
    return listPartitionsSettings;
  }

  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }

  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }

  // Dispatches to the concrete stub implementation matching the configured transport
  // (gRPC or REST/HTTP-JSON); any other transport name is rejected.
  public MetadataServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcMetadataServiceStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonMetadataServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "dataplex";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "dataplex.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dataplex.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  // gRPC is the default transport for this service.
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(MetadataServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(MetadataServiceStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return MetadataServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Snapshots the mutable builder tree into immutable per-RPC settings.
  protected MetadataServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    createEntitySettings = settingsBuilder.createEntitySettings().build();
    updateEntitySettings = settingsBuilder.updateEntitySettings().build();
    deleteEntitySettings = settingsBuilder.deleteEntitySettings().build();
    getEntitySettings = settingsBuilder.getEntitySettings().build();
    listEntitiesSettings = settingsBuilder.listEntitiesSettings().build();
    createPartitionSettings = settingsBuilder.createPartitionSettings().build();
    deletePartitionSettings = settingsBuilder.deletePartitionSettings().build();
    getPartitionSettings = settingsBuilder.getPartitionSettings().build();
    listPartitionsSettings = settingsBuilder.listPartitionsSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }

  /** Builder for MetadataServiceStubSettings. */
  public static class Builder extends StubSettings.Builder<MetadataServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<CreateEntityRequest, Entity> createEntitySettings;
    private final UnaryCallSettings.Builder<UpdateEntityRequest, Entity> updateEntitySettings;
    private final UnaryCallSettings.Builder<DeleteEntityRequest, Empty> deleteEntitySettings;
    private final UnaryCallSettings.Builder<GetEntityRequest, Entity> getEntitySettings;
    private final PagedCallSettings.Builder<
            ListEntitiesRequest, ListEntitiesResponse, ListEntitiesPagedResponse>
        listEntitiesSettings;
    private final UnaryCallSettings.Builder<CreatePartitionRequest, Partition>
        createPartitionSettings;
    private final UnaryCallSettings.Builder<DeletePartitionRequest, Empty> deletePartitionSettings;
    private final UnaryCallSettings.Builder<GetPartitionRequest, Partition> getPartitionSettings;
    private final PagedCallSettings.Builder<
            ListPartitionsRequest, ListPartitionsResponse, ListPartitionsPagedResponse>
        listPartitionsSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;

    // Retryable status-code sets keyed by the generator's retry-policy names.
    // Only UNAVAILABLE is retried for the retry_policy_* groups; the no_retry_* groups
    // retry on nothing.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "no_retry_7_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      definitions.put(
          "retry_policy_2_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "retry_policy_3_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Retry/timeout parameter sets matching the code groups above. The no_retry_7 group
    // is a single 60s attempt; the retry_policy_2/3 groups back off from 1s (x1.3, capped
    // at 10s) within a 60s total timeout.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("no_retry_7_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(10000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_2_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(10000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_3_params", settings);
      settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
      definitions.put("no_retry_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      createEntitySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateEntitySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteEntitySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getEntitySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listEntitiesSettings = PagedCallSettings.newBuilder(LIST_ENTITIES_PAGE_STR_FACT);
      createPartitionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deletePartitionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getPartitionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listPartitionsSettings = PagedCallSettings.newBuilder(LIST_PARTITIONS_PAGE_STR_FACT);
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      // Registration list consumed by applyToAllUnaryMethods(); must include every
      // unary/paged builder above.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createEntitySettings,
              updateEntitySettings,
              deleteEntitySettings,
              getEntitySettings,
              listEntitiesSettings,
              createPartitionSettings,
              deletePartitionSettings,
              getPartitionSettings,
              listPartitionsSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }

    // Copy constructor used by toBuilder(): re-opens each immutable settings object
    // as a mutable builder.
    protected Builder(MetadataServiceStubSettings settings) {
      super(settings);

      createEntitySettings = settings.createEntitySettings.toBuilder();
      updateEntitySettings = settings.updateEntitySettings.toBuilder();
      deleteEntitySettings = settings.deleteEntitySettings.toBuilder();
      getEntitySettings = settings.getEntitySettings.toBuilder();
      listEntitiesSettings = settings.listEntitiesSettings.toBuilder();
      createPartitionSettings = settings.createPartitionSettings.toBuilder();
      deletePartitionSettings = settings.deletePartitionSettings.toBuilder();
      getPartitionSettings = settings.getPartitionSettings.toBuilder();
      listPartitionsSettings = settings.listPartitionsSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createEntitySettings,
              updateEntitySettings,
              deleteEntitySettings,
              getEntitySettings,
              listEntitiesSettings,
              createPartitionSettings,
              deletePartitionSettings,
              getPartitionSettings,
              listPartitionsSettings,
              listLocationsSettings,
              getLocationSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the service-config retry policy to each method: mutating RPCs
    // (create/update/delete) do not retry; reads (get*) use retry_policy_2 and
    // list Entities/Partitions use retry_policy_3 (both retry UNAVAILABLE only);
    // the mixin Locations methods use no_retry.
    private static Builder initDefaults(Builder builder) {
      builder
          .createEntitySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_7_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_7_params"));

      builder
          .updateEntitySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_7_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_7_params"));

      builder
          .deleteEntitySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_7_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_7_params"));

      builder
          .getEntitySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"));

      builder
          .listEntitiesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));

      builder
          .createPartitionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_7_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_7_params"));

      builder
          .deletePartitionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_7_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_7_params"));

      builder
          .getPartitionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"));

      builder
          .listPartitionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));

      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to createEntity. */
    public UnaryCallSettings.Builder<CreateEntityRequest, Entity> createEntitySettings() {
      return createEntitySettings;
    }

    /** Returns the builder for the settings used for calls to updateEntity. */
    public UnaryCallSettings.Builder<UpdateEntityRequest, Entity> updateEntitySettings() {
      return updateEntitySettings;
    }

    /** Returns the builder for the settings used for calls to deleteEntity. */
    public UnaryCallSettings.Builder<DeleteEntityRequest, Empty> deleteEntitySettings() {
      return deleteEntitySettings;
    }

    /** Returns the builder for the settings used for calls to getEntity. */
    public UnaryCallSettings.Builder<GetEntityRequest, Entity> getEntitySettings() {
      return getEntitySettings;
    }

    /** Returns the builder for the settings used for calls to listEntities. */
    public PagedCallSettings.Builder<
            ListEntitiesRequest, ListEntitiesResponse, ListEntitiesPagedResponse>
        listEntitiesSettings() {
      return listEntitiesSettings;
    }

    /** Returns the builder for the settings used for calls to createPartition. */
    public UnaryCallSettings.Builder<CreatePartitionRequest, Partition> createPartitionSettings() {
      return createPartitionSettings;
    }

    /** Returns the builder for the settings used for calls to deletePartition. */
    public UnaryCallSettings.Builder<DeletePartitionRequest, Empty> deletePartitionSettings() {
      return deletePartitionSettings;
    }

    /** Returns the builder for the settings used for calls to getPartition. */
    public UnaryCallSettings.Builder<GetPartitionRequest, Partition> getPartitionSettings() {
      return getPartitionSettings;
    }

    /** Returns the builder for the settings used for calls to listPartitions. */
    public PagedCallSettings.Builder<
            ListPartitionsRequest, ListPartitionsResponse, ListPartitionsPagedResponse>
        listPartitionsSettings() {
      return listPartitionsSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    @Override
    public MetadataServiceStubSettings build() throws IOException {
      return new MetadataServiceStubSettings(this);
    }
  }
}
apache/nifi
35,365
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.standard; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; import org.apache.nifi.annotation.behavior.TriggerSerially; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.documentation.UseCase; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.DescribedValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.FlowFileFilter; import org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult; import org.apache.nifi.processor.ProcessContext; import 
org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.util.timebuffer.LongEntityAccess;
import org.apache.nifi.util.timebuffer.TimedBuffer;
import org.apache.nifi.util.timebuffer.TimestampedLong;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.LongSupplier;
import java.util.regex.Pattern;

@SideEffectFree
@TriggerSerially
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"rate control", "throttle", "rate", "throughput"})
@CapabilityDescription("Controls the rate at which data is transferred to follow-on processors."
        + " If you configure a very small Time Duration, then the accuracy of the throttle gets worse."
        + " You can improve this accuracy by decreasing the Yield Duration, at the expense of more Tasks given to the processor.")
@UseCase(description = "Limit the rate at which data is sent to a downstream system with little to no bursts",
    keywords = {"throttle", "limit", "slow down", "data rate"},
    configuration = """
        Set the "Rate Control Criteria" to `data rate`.
        Set the "Time Duration" property to `1 sec`.
        Configure the "Maximum Rate" property to specify how much data should be allowed through each second.

        For example, to allow through 8 MB per second, set "Maximum Rate" to `8 MB`.
        """
)
@UseCase(description = "Limit the rate at which FlowFiles are sent to a downstream system with little to no bursts",
    keywords = {"throttle", "limit", "slow down", "flowfile rate"},
    configuration = """
        Set the "Rate Control Criteria" to `flowfile count`.
        Set the "Time Duration" property to `1 sec`.
        Configure the "Maximum Rate" property to specify how many FlowFiles should be allowed through each second.

        For example, to allow through 100 FlowFiles per second, set "Maximum Rate" to `100`.
        """
)
@UseCase(description = "Reject requests that exceed a specific rate with little to no bursts",
    keywords = {"throttle", "limit", "slow down", "request rate"},
    configuration = """
        Set the "Rate Control Criteria" to `flowfile count`.
        Set the "Time Duration" property to `1 sec`.
        Set the "Rate Exceeded Strategy" property to `Route to 'rate exceeded'`.
        Configure the "Maximum Rate" property to specify how many requests should be allowed through each second.

        For example, to allow through 100 requests per second, set "Maximum Rate" to `100`.
        If more than 100 requests come in during any one second, the additional requests will be routed to `rate exceeded` instead of `success`.
        """
)
@UseCase(description = "Reject requests that exceed a specific rate, allowing for bursts",
    keywords = {"throttle", "limit", "slow down", "request rate"},
    configuration = """
        Set the "Rate Control Criteria" to `flowfile count`.
        Set the "Time Duration" property to `1 min`.
        Set the "Rate Exceeded Strategy" property to `Route to 'rate exceeded'`.
        Configure the "Maximum Rate" property to specify how many requests should be allowed through each minute.

        For example, to allow through 100 requests per second, set "Maximum Rate" to `6000`.
        This will allow through 6,000 FlowFiles per minute, which averages to 100 FlowFiles per second.
        However, those 6,000 FlowFiles may come all within the first couple of seconds, or they may come in over a period of 60 seconds.
        As a result, this gives us an average rate of 100 FlowFiles per second but allows for bursts of data.
        If more than 6,000 requests come in during any one minute, the additional requests will be routed to `rate exceeded` instead of `success`.
        """
)
public class ControlRate extends AbstractProcessor {

    static final AllowableValue HOLD_FLOWFILE = new AllowableValue("Hold FlowFile", "Hold FlowFile",
        "The FlowFile will be held in its input queue until the rate of data has fallen below the configured maximum and will then be allowed through.");
    static final AllowableValue ROUTE_TO_RATE_EXCEEDED = new AllowableValue("Route to 'rate exceeded'", "Route to 'rate exceeded'",
        "The FlowFile will be routed to the 'rate exceeded' Relationship.");

    // based on testing to balance commits and 10,000 FF swap limit
    public static final int MAX_FLOW_FILES_PER_BATCH = 1000;

    // Sentinel accrual value: negative amounts are rejected by Throttle.tryAdd, so this
    // effectively marks "no countable accrual" (e.g. missing/invalid rate attribute).
    private static final long DEFAULT_ACCRUAL_COUNT = -1L;

    public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
        .name("Rate Control Criteria")
        .description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
        .required(true)
        .allowableValues(RateControlCriteria.class)
        .defaultValue(RateControlCriteria.DATA_RATE)
        .build();
    public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
        .name("Maximum Rate")
        .description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
            + "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
        .required(false)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
        .dependsOn(RATE_CONTROL_CRITERIA, RateControlCriteria.DATA_RATE, RateControlCriteria.FLOWFILE_RATE, RateControlCriteria.ATTRIBUTE_RATE)
        .build();
    public static final PropertyDescriptor MAX_DATA_RATE = new PropertyDescriptor.Builder()
        .name("Maximum Data Rate")
        .description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
            + "Data Size (such as '1 MB') representing bytes per Time Duration.")
        .required(false)
        .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
        .dependsOn(RATE_CONTROL_CRITERIA, RateControlCriteria.DATA_OR_FLOWFILE_RATE)
        .build();
    public static final PropertyDescriptor MAX_COUNT_RATE = new PropertyDescriptor.Builder()
        .name("Maximum FlowFile Rate")
        .description("The maximum rate at which FlowFiles should pass through this processor. The format of this property is expected to be a "
            + "positive integer representing FlowFiles count per Time Duration")
        .required(false)
        .addValidator(StandardValidators.POSITIVE_LONG_VALIDATOR)
        .dependsOn(RATE_CONTROL_CRITERIA, RateControlCriteria.DATA_OR_FLOWFILE_RATE)
        .build();
    public static final PropertyDescriptor RATE_EXCEEDED_STRATEGY = new PropertyDescriptor.Builder()
        .name("Rate Exceeded Strategy")
        .description("Specifies how to handle an incoming FlowFile when the maximum data rate has been exceeded.")
        .required(true)
        .allowableValues(HOLD_FLOWFILE, ROUTE_TO_RATE_EXCEEDED)
        .defaultValue(HOLD_FLOWFILE.getValue())
        .build();
    public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
        .name("Rate Controlled Attribute")
        .description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
            + "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
            + "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
        .required(false)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .dependsOn(RATE_CONTROL_CRITERIA, RateControlCriteria.ATTRIBUTE_RATE)
        .build();
    public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
        .name("Time Duration")
        .description("The amount of time to which the Maximum Rate pertains. Changing this value resets the rate counters.")
        .required(true)
        .addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
        .defaultValue("1 min")
        .build();
    public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
        .name("Grouping Attribute")
        .description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
            + "each value specified by the attribute with this name. Changing this value resets the rate counters.")
        .required(false)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .build();

    private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of(
        RATE_CONTROL_CRITERIA,
        TIME_PERIOD,
        MAX_RATE,
        MAX_DATA_RATE,
        MAX_COUNT_RATE,
        RATE_EXCEEDED_STRATEGY,
        RATE_CONTROL_ATTRIBUTE_NAME,
        GROUPING_ATTRIBUTE_NAME
    );

    static final Relationship REL_SUCCESS = new Relationship.Builder()
        .name("success")
        .description("FlowFiles are transferred to this relationship under normal conditions")
        .build();
    static final Relationship REL_FAILURE = new Relationship.Builder()
        .name("failure")
        .description("FlowFiles will be routed to this relationship if they are missing a necessary Rate Controlled Attribute or the attribute is not in the expected format")
        .build();
    static final Relationship REL_RATE_EXCEEDED = new Relationship.Builder()
        .name("rate exceeded")
        .description("A FlowFile will be routed to this Relationship if it results in exceeding the maximum threshold allowed based on the Processor's configuration and if the Rate Exceeded "
            + "Strategy is configured to use this Relationship.")
        .build();

    private static final Set<Relationship> DEFAULT_RELATIONSHIPS = Set.of(
        REL_SUCCESS,
        REL_FAILURE
    );

    private static final Set<Relationship> RATE_EXCEEDED_RELATIONSHIPS = Set.of(
        REL_SUCCESS,
        REL_FAILURE,
        REL_RATE_EXCEEDED
    );

    // Accepts positive longs only (leading zeros allowed, plain zero rejected); compiled once.
    private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
    // Key used in the throttle maps when no Grouping Attribute is configured (or the attribute is absent).
    private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";

    private volatile Set<Relationship> relationships = DEFAULT_RELATIONSHIPS;

    // One Throttle per group name; data throttle tracks bytes, count throttle tracks FlowFile/attribute counts.
    private final ConcurrentMap<String, Throttle> dataThrottleMap = new ConcurrentHashMap<>();
    private final ConcurrentMap<String, Throttle> countThrottleMap = new ConcurrentHashMap<>();
    // NOTE(review): field initializer calls the overridable getCurrentTimeMillis(); kept because
    // subclasses appear to override it as a test hook for supplying a deterministic clock.
    private final AtomicLong lastThrottleClearTime = new AtomicLong(getCurrentTimeMillis());

    // Cached configuration, refreshed in onScheduled.
    private volatile RateControlCriteria rateControlCriteria = null;
    private volatile String rateControlAttribute = null;
    private volatile String maximumRateStr = null;
    private volatile String maximumCountRateStr = null;
    private volatile String groupingAttributeName = null;
    private volatile int timePeriodSeconds = 1;

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return PROPERTY_DESCRIPTORS;
    }

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    /**
     * Validates the properties whose requirements depend on the selected Rate Control Criteria,
     * since those dependencies cannot be expressed through the static descriptors alone.
     */
    @Override
    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));

        switch (context.getProperty(RATE_CONTROL_CRITERIA).asAllowableValue(RateControlCriteria.class)) {
            case DATA_OR_FLOWFILE_RATE:
                // enforce validators to be sure properties are configured; they are only required for DATA_OR_FLOWFILE_RATE criteria
                validationResults.add(StandardValidators.DATA_SIZE_VALIDATOR.validate(MAX_DATA_RATE.getDisplayName(),
                    context.getProperty(MAX_DATA_RATE).getValue(), context));
                validationResults.add(StandardValidators.POSITIVE_LONG_VALIDATOR.validate(MAX_COUNT_RATE.getDisplayName(),
                    context.getProperty(MAX_COUNT_RATE).getValue(), context));
                break;
            case DATA_RATE:
                validationResults.add(StandardValidators.DATA_SIZE_VALIDATOR.validate("Maximum Rate",
                    context.getProperty(MAX_RATE).getValue(), context));
                break;
            case ATTRIBUTE_RATE:
                final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
                if (rateAttr == null) {
                    validationResults.add(new ValidationResult.Builder()
                        .subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
                        .explanation("property must be set if using <Rate Control Criteria> of 'attribute value'")
                        .build());
                }
                // fallthrough is intentional since the "Maximum Rate" property must be filled in
                // when using either of DATA_RATE, FLOWFILE_RATE or ATTRIBUTE_RATE.
            case FLOWFILE_RATE:
                validationResults.add(StandardValidators.POSITIVE_LONG_VALIDATOR.validate("Maximum Rate",
                    context.getProperty(MAX_RATE).getValue(), context));
                break;
        }

        return validationResults;
    }

    /**
     * Reacts to property changes: swaps the relationship set for the Rate Exceeded Strategy,
     * resets the throttle maps when the throttling criteria change, and pushes new maximum
     * rates into any existing Throttles.
     */
    @Override
    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
        super.onPropertyModified(descriptor, oldValue, newValue);

        if (descriptor.equals(RATE_EXCEEDED_STRATEGY)) {
            if (ROUTE_TO_RATE_EXCEEDED.getValue().equalsIgnoreCase(newValue)) {
                this.relationships = RATE_EXCEEDED_RELATIONSHIPS;
            } else {
                this.relationships = DEFAULT_RELATIONSHIPS;
            }
        }

        if (descriptor.equals(RATE_CONTROL_CRITERIA)
            || descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
            || descriptor.equals(GROUPING_ATTRIBUTE_NAME)
            || descriptor.equals(TIME_PERIOD)) {
            // if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
            dataThrottleMap.clear();
            countThrottleMap.clear();
        } else if (descriptor.equals(MAX_RATE) || descriptor.equals(MAX_DATA_RATE)) {
            // MAX_RATE could affect either throttle map; MAX_DATA_RATE only affects data throttle map
            if (newValue != null) {
                long newRate;
                if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue.toUpperCase()).matches()) {
                    newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
                } else {
                    try {
                        newRate = Long.parseLong(newValue);
                    } catch (final NumberFormatException nfe) {
                        // FIX: invalid input is reported by validation elsewhere; never throw from a
                        // property-change callback. Fall back to -1, mirroring the MAX_COUNT_RATE branch below.
                        newRate = -1;
                    }
                }
                if (dataThrottleRequired()) {
                    for (final Throttle throttle : dataThrottleMap.values()) {
                        throttle.setMaxRate(newRate);
                    }
                }
                if (countThrottleRequired()) {
                    for (final Throttle throttle : countThrottleMap.values()) {
                        throttle.setMaxRate(newRate);
                    }
                }
            }
        } else if (descriptor.equals(MAX_COUNT_RATE)) {
            // MAX_COUNT_RATE only affects count throttle map
            long newRate;
            try {
                newRate = Long.parseLong(newValue);
            } catch (NumberFormatException nfe) {
                newRate = -1;
            }
            for (final Throttle throttle : countThrottleMap.values()) {
                throttle.setMaxRate(newRate);
            }
        }
    }

    /**
     * Caches the configured criteria, rates, grouping attribute and time period so that
     * the per-FlowFile filtering path does not have to re-read properties.
     */
    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        rateControlCriteria = context.getProperty(RATE_CONTROL_CRITERIA).asAllowableValue(RateControlCriteria.class);
        rateControlAttribute = rateControlCriteria == RateControlCriteria.ATTRIBUTE_RATE
            ? context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue() : null;
        if (dataThrottleRequired()) {
            // Use MAX_DATA_RATE only for DATA_OR_FLOWFILE_RATE criteria
            maximumRateStr = rateControlCriteria == RateControlCriteria.DATA_OR_FLOWFILE_RATE
                ? context.getProperty(MAX_DATA_RATE).getValue().toUpperCase()
                : context.getProperty(MAX_RATE).getValue().toUpperCase();
        }
        if (countThrottleRequired()) {
            // Use MAX_COUNT_RATE only for DATA_OR_FLOWFILE_RATE criteria
            maximumCountRateStr = rateControlCriteria == RateControlCriteria.DATA_OR_FLOWFILE_RATE
                ? context.getProperty(MAX_COUNT_RATE).getValue()
                : context.getProperty(MAX_RATE).getValue();
        }
        groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
        timePeriodSeconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS).intValue();
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final String strategy = context.getProperty(RATE_EXCEEDED_STRATEGY).getValue();
        if (ROUTE_TO_RATE_EXCEEDED.getValue().equalsIgnoreCase(strategy)) {
            routeFlowFilesExceedingRate(context, session);
        } else {
            holdFlowFilesExceedingRate(context, session);
        }
    }

    /*
     * Strategy "Route to 'rate exceeded'": pull a batch unconditionally, then route each FlowFile to
     * success, rate exceeded, or failure based on the throttle filter and attribute validity.
     */
    private void routeFlowFilesExceedingRate(final ProcessContext context, final ProcessSession session) {
        clearExpiredThrottles(context);

        final List<FlowFile> flowFiles = session.get(MAX_FLOW_FILES_PER_BATCH);
        if (flowFiles.isEmpty()) {
            context.yield();
            return;
        }

        final ThrottleFilter filter = new ThrottleFilter(MAX_FLOW_FILES_PER_BATCH, this::getCurrentTimeMillis);
        for (final FlowFile flowFile : flowFiles) {
            final Relationship relationship;
            if (!isRateAttributeValid(flowFile)) {
                relationship = REL_FAILURE;
            } else {
                final FlowFileFilterResult result = filter.filter(flowFile);
                relationship = result.isAccept() ? REL_SUCCESS : REL_RATE_EXCEEDED;
            }

            session.transfer(flowFile, relationship);
            getLogger().info("Routing {} to {}", flowFile, relationship.getName());
            session.getProvenanceReporter().route(flowFile, relationship);
        }
    }

    /*
     * Strategy "Hold FlowFile": let the throttle filter decide which FlowFiles are even taken from
     * the queue; anything pulled is transferred to success (or failure if its rate attribute is bad).
     */
    private void holdFlowFilesExceedingRate(final ProcessContext context, final ProcessSession session) {
        clearExpiredThrottles(context);

        final List<FlowFile> flowFiles = session.get(new ThrottleFilter(MAX_FLOW_FILES_PER_BATCH, this::getCurrentTimeMillis));
        if (flowFiles.isEmpty()) {
            context.yield();
            return;
        }

        final ComponentLog logger = getLogger();
        for (FlowFile flowFile : flowFiles) {
            // call this to capture potential error
            if (isRateAttributeValid(flowFile)) {
                logger.info("transferring {} to 'success'", flowFile);
                session.transfer(flowFile, REL_SUCCESS);
            } else {
                logger.error("Routing {} to 'failure' due to missing or invalid attribute", flowFile);
                session.transfer(flowFile, REL_FAILURE);
            }
        }
    }

    private void clearExpiredThrottles(final ProcessContext context) {
        // Periodically clear any Throttle that has not been used in more than 2 throttling periods
        final long lastClearTime = lastThrottleClearTime.get();
        final long throttleExpirationMillis = getCurrentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
        if (lastClearTime < throttleExpirationMillis) {
            // compareAndSet ensures only one concurrent caller performs the sweep
            if (lastThrottleClearTime.compareAndSet(lastClearTime, getCurrentTimeMillis())) {
                final Set<Map.Entry<String, Throttle>> throttleSet = new HashSet<>();
                if (dataThrottleRequired()) {
                    throttleSet.addAll(dataThrottleMap.entrySet());
                }
                if (countThrottleRequired()) {
                    throttleSet.addAll(countThrottleMap.entrySet());
                }
                final Iterator<Map.Entry<String, Throttle>> itr = throttleSet.iterator();
                while (itr.hasNext()) {
                    final Map.Entry<String, Throttle> entry = itr.next();

                    final Throttle throttle = entry.getValue();
                    // tryLock rather than lock: skip throttles that are actively in use
                    if (throttle.tryLock()) {
                        try {
                            if (throttle.lastUpdateTime() < lastClearTime) {
                                itr.remove();
                            }
                        } finally {
                            throttle.unlock();
                        }
                    }
                }
            }
        }
    }

    /**
     * Get current time in milliseconds
     *
     * @return Current time in milliseconds from System
     */
    protected long getCurrentTimeMillis() {
        return System.currentTimeMillis();
    }

    /*
     * Determine if the accrual amount is valid for the type of throttle being applied. For example, if throttling based on
     * flowfile attribute, the specified attribute must be present and must be a long integer.
     */
    private boolean isRateAttributeValid(FlowFile flowFile) {
        if (rateControlCriteria == RateControlCriteria.ATTRIBUTE_RATE) {
            final String attributeValue = flowFile.getAttribute(rateControlAttribute);
            return attributeValue != null && POSITIVE_LONG_PATTERN.matcher(attributeValue).matches();
        }
        return true;
    }

    /*
     * Determine the amount this FlowFile will incur against the maximum allowed rate.
     * This is applicable to data size accrual only
     */
    private long getDataSizeAccrual(FlowFile flowFile) {
        return flowFile.getSize();
    }

    /*
     * Determine the amount this FlowFile will incur against the maximum allowed rate.
     * This is applicable to counting accruals, flowfiles or attributes
     */
    private long getCountAccrual(FlowFile flowFile) {
        return switch (rateControlCriteria) {
            case DATA_RATE -> DEFAULT_ACCRUAL_COUNT;
            case FLOWFILE_RATE, DATA_OR_FLOWFILE_RATE -> 1;
            case ATTRIBUTE_RATE -> {
                final String attributeValue = flowFile.getAttribute(rateControlAttribute);
                if (attributeValue == null) {
                    yield DEFAULT_ACCRUAL_COUNT;
                }
                if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
                    yield DEFAULT_ACCRUAL_COUNT;
                }
                yield Long.parseLong(attributeValue);
            }
        };
    }

    // True when the configured criteria require tracking bytes transferred.
    private boolean dataThrottleRequired() {
        return rateControlCriteria != null && switch (rateControlCriteria) {
            case DATA_RATE, DATA_OR_FLOWFILE_RATE -> true;
            default -> false;
        };
    }

    // True when the configured criteria require tracking FlowFile/attribute counts.
    private boolean countThrottleRequired() {
        return rateControlCriteria != null && switch (rateControlCriteria) {
            case FLOWFILE_RATE, DATA_OR_FLOWFILE_RATE, ATTRIBUTE_RATE -> true;
            default -> false;
        };
    }

    enum RateControlCriteria implements DescribedValue {
        DATA_RATE("data rate", "Rate is controlled by counting bytes transferred per time duration."),
        FLOWFILE_RATE("flowfile count", "Rate is controlled by counting FlowFiles transferred per time duration"),
        ATTRIBUTE_RATE("attribute value", "Rate is controlled by accumulating the value of a specified attribute that is transferred per time duration"),
        DATA_OR_FLOWFILE_RATE("data rate or flowfile count",
            "Rate is controlled by counting bytes and FlowFiles transferred per time duration; if either threshold is met, throttling is enforced");

        private final String value;
        private final String description;

        RateControlCriteria(final String value, final String description) {
            this.value = value;
            this.description = description;
        }

        @Override
        public String getValue() {
            return this.value;
        }

        @Override
        public String getDisplayName() {
            return this.value;
        }

        @Override
        public String getDescription() {
            return this.description;
        }
    }

    /**
     * Tracks the amount transferred within a rolling time window and decides whether an
     * additional accrual is allowed. Extends ReentrantLock so callers can lock/tryLock the
     * throttle itself around tryAdd and expiry checks.
     */
    private static class Throttle extends ReentrantLock {
        private final AtomicLong maxRate = new AtomicLong(1L);
        private final long timePeriodMillis;
        private final TimedBuffer<TimestampedLong> timedBuffer;
        private final ComponentLog logger;
        private final LongSupplier currentTimeSupplier;

        // When an accrual overshoots the max rate, the throttle is "penalized" for the time it
        // would take to transfer the overshoot; no further accruals are allowed until it expires.
        private volatile long penalizationPeriod = 0;
        private volatile long penalizationExpired = 0;
        private volatile long lastUpdateTime;

        private Throttle(final int timePeriod, final TimeUnit unit, final ComponentLog logger, final LongSupplier currentTimeSupplier) {
            this.timePeriodMillis = TimeUnit.MILLISECONDS.convert(timePeriod, unit);
            this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new LongEntityAccess(), currentTimeSupplier);
            this.logger = logger;
            this.currentTimeSupplier = currentTimeSupplier;
        }

        public void setMaxRate(final long maxRate) {
            this.maxRate.set(maxRate);
        }

        public long lastUpdateTime() {
            return lastUpdateTime;
        }

        /**
         * Attempts to accrue {@code value} against the window; returns true if the FlowFile
         * may proceed. Callers must hold this lock.
         */
        public boolean tryAdd(final long value) {
            // value should never be negative, but if it is return immediately
            if (value < 0) {
                return false;
            }

            final long now = currentTimeSupplier.getAsLong();
            if (penalizationExpired > now) {
                return false;
            }

            final long maxRateValue = maxRate.get();
            final TimestampedLong sum = timedBuffer.getAggregateValue(timePeriodMillis);
            if (sum != null && sum.getValue() >= maxRateValue) {
                if (logger.isDebugEnabled()) {
                    logger.debug("current sum for throttle is {} at time {}, so not allowing rate of {} through",
                        sum.getValue(), sum.getTimestamp(), value);
                }
                return false;
            }

            // Implement the Throttle penalization based on how much extra 'amountOver' was allowed through
            if (penalizationPeriod > 0) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Starting Throttle penalization, expiring {} milliseconds from now", penalizationPeriod);
                }
                penalizationExpired = now + penalizationPeriod;
                penalizationPeriod = 0;
                return false;
            }

            if (logger.isDebugEnabled()) {
                logger.debug("current sum for throttle is {} at time {}, so allowing rate of {} through",
                    sum == null ? 0 : sum.getValue(), sum == null ? 0 : sum.getTimestamp(), value);
            }

            final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
            if (transferred > maxRateValue) {
                final long amountOver = transferred - maxRateValue;
                // determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
                final double pct = (double) amountOver / (double) maxRateValue;
                this.penalizationPeriod = (long) (timePeriodMillis * pct);

                if (logger.isDebugEnabled()) {
                    logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", value, penalizationPeriod);
                }
            }

            lastUpdateTime = now;
            return true;
        }
    }

    /**
     * FlowFileFilter that accepts FlowFiles only while the applicable data and/or count
     * throttles for the FlowFile's group have capacity, terminating the batch at
     * {@code flowFilesPerBatch} accepted files.
     */
    private class ThrottleFilter implements FlowFileFilter {
        private final int flowFilesPerBatch;
        private final LongSupplier currentTimeSupplier;
        private int flowFilesInBatch = 0;

        ThrottleFilter(final int maxFFPerBatch, final LongSupplier currentTimeSupplier) {
            this.flowFilesPerBatch = maxFFPerBatch;
            this.currentTimeSupplier = currentTimeSupplier;
        }

        @Override
        public FlowFileFilterResult filter(FlowFile flowFile) {
            if (!isRateAttributeValid(flowFile)) {
                // this FlowFile is invalid for this configuration so let the processor deal with it
                return FlowFileFilterResult.ACCEPT_AND_TERMINATE;
            }

            String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);

            // the flow file may not have the required attribute: in this case it is considered part
            // of the DEFAULT_GROUP_ATTRIBUTE
            if (groupName == null) {
                groupName = DEFAULT_GROUP_ATTRIBUTE;
            }

            Throttle dataThrottle = dataThrottleMap.get(groupName);
            Throttle countThrottle = countThrottleMap.get(groupName);

            boolean dataThrottlingActive = false;
            if (dataThrottleRequired()) {
                if (dataThrottle == null) {
                    dataThrottle = new Throttle(timePeriodSeconds, TimeUnit.SECONDS, getLogger(), currentTimeSupplier);
                    dataThrottle.setMaxRate(DataUnit.parseDataSize(maximumRateStr, DataUnit.B).longValue());
                    dataThrottleMap.put(groupName, dataThrottle);
                }

                dataThrottle.lock();
                try {
                    if (dataThrottle.tryAdd(getDataSizeAccrual(flowFile))) {
                        flowFilesInBatch++;
                        if (flowFilesInBatch >= flowFilesPerBatch) {
                            flowFilesInBatch = 0;
                            return FlowFileFilterResult.ACCEPT_AND_TERMINATE;
                        } else {
                            // only accept flowfile if additional count throttle does not need to run
                            if (!countThrottleRequired()) {
                                return FlowFileFilterResult.ACCEPT_AND_CONTINUE;
                            }
                        }
                    } else {
                        dataThrottlingActive = true;
                    }
                } finally {
                    dataThrottle.unlock();
                }
            }

            // continue processing count throttle only if required and if data throttle is not already limiting flowfiles
            if (countThrottleRequired() && !dataThrottlingActive) {
                if (countThrottle == null) {
                    countThrottle = new Throttle(timePeriodSeconds, TimeUnit.SECONDS, getLogger(), currentTimeSupplier);
                    countThrottle.setMaxRate(Long.parseLong(maximumCountRateStr));
                    countThrottleMap.put(groupName, countThrottle);
                }

                countThrottle.lock();
                try {
                    if (countThrottle.tryAdd(getCountAccrual(flowFile))) {
                        flowFilesInBatch++;
                        if (flowFilesInBatch >= flowFilesPerBatch) {
                            flowFilesInBatch = 0;
                            return FlowFileFilterResult.ACCEPT_AND_TERMINATE;
                        } else {
                            return FlowFileFilterResult.ACCEPT_AND_CONTINUE;
                        }
                    }
                } finally {
                    countThrottle.unlock();
                }
            }

            // If we are not using a grouping attribute, then no FlowFile will be able to continue on. So we can
            // just TERMINATE the iteration over FlowFiles.
            // However, if we are using a grouping attribute, then another FlowFile in the queue may be able to proceed,
            // so we want to continue our iteration.
            if (groupingAttributeName == null) {
                return FlowFileFilterResult.REJECT_AND_TERMINATE;
            }

            return FlowFileFilterResult.REJECT_AND_CONTINUE;
        }
    }
}
apache/commons-lang
35,478
src/test/java/org/apache/commons/lang3/ArrayUtilsAddTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3; import static org.apache.commons.lang3.LangAssertions.assertIllegalArgumentException; import static org.apache.commons.lang3.LangAssertions.assertIndexOutOfBoundsException; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNotSame; import static org.junit.jupiter.api.Assertions.assertNull; import org.junit.jupiter.api.Test; /** * Tests ArrayUtils add methods. 
*/ class ArrayUtilsAddTest extends AbstractLangTest { @Test void testAddFirstBoolean() { boolean[] newArray; newArray = ArrayUtils.addFirst(null, false); assertArrayEquals(new boolean[] { false }, newArray); assertEquals(Boolean.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst(null, true); assertArrayEquals(new boolean[] { true }, newArray); assertEquals(Boolean.TYPE, newArray.getClass().getComponentType()); final boolean[] array1 = { true, false, true }; newArray = ArrayUtils.addFirst(array1, false); assertArrayEquals(new boolean[] { false, true, false, true }, newArray); assertEquals(Boolean.TYPE, newArray.getClass().getComponentType()); } @Test void testAddFirstByte() { byte[] newArray; newArray = ArrayUtils.addFirst((byte[]) null, (byte) 0); assertArrayEquals(new byte[] { 0 }, newArray); assertEquals(Byte.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst((byte[]) null, (byte) 1); assertArrayEquals(new byte[] { 1 }, newArray); assertEquals(Byte.TYPE, newArray.getClass().getComponentType()); final byte[] array1 = { 1, 2, 3 }; newArray = ArrayUtils.addFirst(array1, (byte) 0); assertArrayEquals(new byte[] { 0, 1, 2, 3 }, newArray); assertEquals(Byte.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst(array1, (byte) 4); assertArrayEquals(new byte[] { 4, 1, 2, 3 }, newArray); assertEquals(Byte.TYPE, newArray.getClass().getComponentType()); } @Test void testAddFirstChar() { char[] newArray; newArray = ArrayUtils.addFirst((char[]) null, (char) 0); assertArrayEquals(new char[] { 0 }, newArray); assertEquals(Character.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst((char[]) null, (char) 1); assertArrayEquals(new char[] { 1 }, newArray); assertEquals(Character.TYPE, newArray.getClass().getComponentType()); final char[] array1 = { 1, 2, 3 }; newArray = ArrayUtils.addFirst(array1, (char) 0); assertArrayEquals(new char[] { 0, 1, 2, 3 }, newArray); 
assertEquals(Character.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst(array1, (char) 4); assertArrayEquals(new char[] { 4, 1, 2, 3 }, newArray); assertEquals(Character.TYPE, newArray.getClass().getComponentType()); } @Test void testAddFirstDouble() { double[] newArray; newArray = ArrayUtils.addFirst((double[]) null, 0); assertArrayEquals(new double[] { 0 }, newArray); assertEquals(Double.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst((double[]) null, 1); assertArrayEquals(new double[] { 1 }, newArray); assertEquals(Double.TYPE, newArray.getClass().getComponentType()); final double[] array1 = { 1, 2, 3 }; newArray = ArrayUtils.addFirst(array1, 0); assertArrayEquals(new double[] { 0, 1, 2, 3 }, newArray); assertEquals(Double.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst(array1, 4); assertArrayEquals(new double[] { 4, 1, 2, 3 }, newArray); assertEquals(Double.TYPE, newArray.getClass().getComponentType()); } @Test void testAddFirstFloat() { float[] newArray; newArray = ArrayUtils.addFirst((float[]) null, 0); assertArrayEquals(new float[] { 0 }, newArray); assertEquals(Float.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst((float[]) null, 1); assertArrayEquals(new float[] { 1 }, newArray); assertEquals(Float.TYPE, newArray.getClass().getComponentType()); final float[] array1 = { 1, 2, 3 }; newArray = ArrayUtils.addFirst(array1, 0); assertArrayEquals(new float[] { 0, 1, 2, 3 }, newArray); assertEquals(Float.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst(array1, 4); assertArrayEquals(new float[] { 4, 1, 2, 3 }, newArray); assertEquals(Float.TYPE, newArray.getClass().getComponentType()); } @Test void testAddFirstInt() { int[] newArray; newArray = ArrayUtils.addFirst((int[]) null, 0); assertArrayEquals(new int[] { 0 }, newArray); assertEquals(Integer.TYPE, newArray.getClass().getComponentType()); newArray = 
ArrayUtils.addFirst((int[]) null, 1); assertArrayEquals(new int[] { 1 }, newArray); assertEquals(Integer.TYPE, newArray.getClass().getComponentType()); final int[] array1 = { 1, 2, 3 }; newArray = ArrayUtils.addFirst(array1, 0); assertArrayEquals(new int[] { 0, 1, 2, 3 }, newArray); assertEquals(Integer.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst(array1, 4); assertArrayEquals(new int[] { 4, 1, 2, 3 }, newArray); assertEquals(Integer.TYPE, newArray.getClass().getComponentType()); } @Test void testAddFirstLong() { long[] newArray; newArray = ArrayUtils.addFirst((long[]) null, 0); assertArrayEquals(new long[] { 0 }, newArray); assertEquals(Long.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst((long[]) null, 1); assertArrayEquals(new long[] { 1 }, newArray); assertEquals(Long.TYPE, newArray.getClass().getComponentType()); final long[] array1 = { 1, 2, 3 }; newArray = ArrayUtils.addFirst(array1, 0); assertArrayEquals(new long[] { 0, 1, 2, 3 }, newArray); assertEquals(Long.TYPE, newArray.getClass().getComponentType()); newArray = ArrayUtils.addFirst(array1, 4); assertArrayEquals(new long[] { 4, 1, 2, 3 }, newArray); assertEquals(Long.TYPE, newArray.getClass().getComponentType()); } @Test void testAddFirstObject() { Object[] newArray; // show that not casting is okay newArray = ArrayUtils.add((Object[]) null, "a"); assertArrayEquals(new String[] { "a" }, newArray); assertArrayEquals(new Object[] { "a" }, newArray); assertEquals(String.class, newArray.getClass().getComponentType()); // show that not casting to Object[] is okay and will assume String based on "a" final String[] newStringArray = ArrayUtils.add(null, "a"); assertArrayEquals(new String[] { "a" }, newStringArray); assertArrayEquals(new Object[] { "a" }, newStringArray); assertEquals(String.class, newStringArray.getClass().getComponentType()); final String[] stringArray1 = { "a", "b", "c" }; newArray = ArrayUtils.addFirst(stringArray1, null); 
// (cont.) tail of testAddFirstObject: verifies the prepend performed at the end of the previous chunk.
assertArrayEquals(new String[] { null, "a", "b", "c" }, newArray);
assertEquals(String.class, newArray.getClass().getComponentType());
newArray = ArrayUtils.addFirst(stringArray1, "d");
assertArrayEquals(new String[] { "d", "a", "b", "c" }, newArray);
assertEquals(String.class, newArray.getClass().getComponentType());
// Mixed Number subtypes: result component type is the common supertype Number.
Number[] numberArray1 = { Integer.valueOf(1), Double.valueOf(2) };
newArray = ArrayUtils.addFirst(numberArray1, Float.valueOf(3));
assertArrayEquals(new Number[] { Float.valueOf(3), Integer.valueOf(1), Double.valueOf(2) }, newArray);
assertEquals(Number.class, newArray.getClass().getComponentType());
// Null array: component type is inferred from the element alone (Float).
numberArray1 = null;
newArray = ArrayUtils.addFirst(numberArray1, Float.valueOf(3));
assertArrayEquals(new Float[] { Float.valueOf(3) }, newArray);
assertEquals(Float.class, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.addFirst(short[], short): null array and prepend cases keep the primitive type.
@Test
void testAddFirstShort() {
    short[] newArray;
    newArray = ArrayUtils.addFirst((short[]) null, (short) 0);
    assertArrayEquals(new short[] { 0 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.addFirst((short[]) null, (short) 1);
    assertArrayEquals(new short[] { 1 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
    final short[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.addFirst(array1, (short) 0);
    assertArrayEquals(new short[] { 0, 1, 2, 3 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.addFirst(array1, (short) 4);
    assertArrayEquals(new short[] { 4, 1, 2, 3 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(boolean[], boolean): null array creates a one-element array; element is appended.
@Test
void testAddObjectArrayBoolean() {
    boolean[] newArray;
    newArray = ArrayUtils.add(null, false);
    assertArrayEquals(new boolean[] { false }, newArray);
    assertEquals(Boolean.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(null, true);
    assertArrayEquals(new boolean[] { true }, newArray);
    // (statement continues into the next chunk)
    assertEquals(Boolean.TYPE,
newArray.getClass().getComponentType());
    final boolean[] array1 = { true, false, true };
    newArray = ArrayUtils.add(array1, false);
    assertArrayEquals(new boolean[] { true, false, true, false }, newArray);
    assertEquals(Boolean.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(byte[], byte): append semantics, null treated as empty.
@Test
void testAddObjectArrayByte() {
    byte[] newArray;
    newArray = ArrayUtils.add((byte[]) null, (byte) 0);
    assertArrayEquals(new byte[] { 0 }, newArray);
    assertEquals(Byte.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add((byte[]) null, (byte) 1);
    assertArrayEquals(new byte[] { 1 }, newArray);
    assertEquals(Byte.TYPE, newArray.getClass().getComponentType());
    final byte[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.add(array1, (byte) 0);
    assertArrayEquals(new byte[] { 1, 2, 3, 0 }, newArray);
    assertEquals(Byte.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(array1, (byte) 4);
    assertArrayEquals(new byte[] { 1, 2, 3, 4 }, newArray);
    assertEquals(Byte.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(char[], char): append semantics, null treated as empty.
@Test
void testAddObjectArrayChar() {
    char[] newArray;
    newArray = ArrayUtils.add((char[]) null, (char) 0);
    assertArrayEquals(new char[] { 0 }, newArray);
    assertEquals(Character.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add((char[]) null, (char) 1);
    assertArrayEquals(new char[] { 1 }, newArray);
    assertEquals(Character.TYPE, newArray.getClass().getComponentType());
    final char[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.add(array1, (char) 0);
    assertArrayEquals(new char[] { 1, 2, 3, 0 }, newArray);
    assertEquals(Character.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(array1, (char) 4);
    assertArrayEquals(new char[] { 1, 2, 3, 4 }, newArray);
    assertEquals(Character.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(double[], double): append semantics, null treated as empty.
@Test
void testAddObjectArrayDouble() {
    double[] newArray;
    newArray = ArrayUtils.add((double[]) null, 0);
    assertArrayEquals(new double[] { 0 }, newArray);
    // (statement continues into the next chunk)
    assertEquals(Double.TYPE,
newArray.getClass().getComponentType());
    newArray = ArrayUtils.add((double[]) null, 1);
    assertArrayEquals(new double[] { 1 }, newArray);
    assertEquals(Double.TYPE, newArray.getClass().getComponentType());
    final double[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.add(array1, 0);
    assertArrayEquals(new double[] { 1, 2, 3, 0 }, newArray);
    assertEquals(Double.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(array1, 4);
    assertArrayEquals(new double[] { 1, 2, 3, 4 }, newArray);
    assertEquals(Double.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(float[], float): append semantics, null treated as empty.
@Test
void testAddObjectArrayFloat() {
    float[] newArray;
    newArray = ArrayUtils.add((float[]) null, 0);
    assertArrayEquals(new float[] { 0 }, newArray);
    assertEquals(Float.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add((float[]) null, 1);
    assertArrayEquals(new float[] { 1 }, newArray);
    assertEquals(Float.TYPE, newArray.getClass().getComponentType());
    final float[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.add(array1, 0);
    assertArrayEquals(new float[] { 1, 2, 3, 0 }, newArray);
    assertEquals(Float.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(array1, 4);
    assertArrayEquals(new float[] { 1, 2, 3, 4 }, newArray);
    assertEquals(Float.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(int[], int): append semantics, null treated as empty.
@Test
void testAddObjectArrayInt() {
    int[] newArray;
    newArray = ArrayUtils.add((int[]) null, 0);
    assertArrayEquals(new int[] { 0 }, newArray);
    assertEquals(Integer.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add((int[]) null, 1);
    assertArrayEquals(new int[] { 1 }, newArray);
    assertEquals(Integer.TYPE, newArray.getClass().getComponentType());
    final int[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.add(array1, 0);
    assertArrayEquals(new int[] { 1, 2, 3, 0 }, newArray);
    assertEquals(Integer.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(array1, 4);
    assertArrayEquals(new int[] { 1, 2, 3, 4 }, newArray);
    // (statement continues into the next chunk)
    assertEquals(Integer.TYPE,
newArray.getClass().getComponentType()); // (cont.) closes the assert split across the chunk boundary
}

// Tests ArrayUtils.add(long[], long): append semantics, null treated as empty.
@Test
void testAddObjectArrayLong() {
    long[] newArray;
    newArray = ArrayUtils.add((long[]) null, 0);
    assertArrayEquals(new long[] { 0 }, newArray);
    assertEquals(Long.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add((long[]) null, 1);
    assertArrayEquals(new long[] { 1 }, newArray);
    assertEquals(Long.TYPE, newArray.getClass().getComponentType());
    final long[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.add(array1, 0);
    assertArrayEquals(new long[] { 1, 2, 3, 0 }, newArray);
    assertEquals(Long.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(array1, 4);
    assertArrayEquals(new long[] { 1, 2, 3, 4 }, newArray);
    assertEquals(Long.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(T[], T): null array, null element, and component-type inference.
@Test
void testAddObjectArrayObject() {
    Object[] newArray;
    // show that not casting is okay
    newArray = ArrayUtils.add((Object[]) null, "a");
    assertArrayEquals(new String[] { "a" }, newArray);
    assertArrayEquals(new Object[] { "a" }, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    // show that not casting to Object[] is okay and will assume String based on "a"
    final String[] newStringArray = ArrayUtils.add(null, "a");
    assertArrayEquals(new String[] { "a" }, newStringArray);
    assertArrayEquals(new Object[] { "a" }, newStringArray);
    assertEquals(String.class, newStringArray.getClass().getComponentType());
    final String[] stringArray1 = { "a", "b", "c" };
    newArray = ArrayUtils.add(stringArray1, null);
    assertArrayEquals(new String[] { "a", "b", "c", null }, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(stringArray1, "d");
    assertArrayEquals(new String[] { "a", "b", "c", "d" }, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    // Mixed Number subtypes: result component type is the common supertype Number.
    Number[] numberArray1 = { Integer.valueOf(1), Double.valueOf(2) };
    newArray = ArrayUtils.add(numberArray1, Float.valueOf(3));
    // (statement continues into the next chunk)
    assertArrayEquals(new Number[] { Integer.valueOf(1),
Double.valueOf(2), Float.valueOf(3) }, newArray);
    assertEquals(Number.class, newArray.getClass().getComponentType());
    // Null array: component type is inferred from the element alone (Float).
    numberArray1 = null;
    newArray = ArrayUtils.add(numberArray1, Float.valueOf(3));
    assertArrayEquals(new Float[] { Float.valueOf(3) }, newArray);
    assertEquals(Float.class, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.add(short[], short): append semantics, null treated as empty.
@Test
void testAddObjectArrayShort() {
    short[] newArray;
    newArray = ArrayUtils.add((short[]) null, (short) 0);
    assertArrayEquals(new short[] { 0 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add((short[]) null, (short) 1);
    assertArrayEquals(new short[] { 1 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
    final short[] array1 = { 1, 2, 3 };
    newArray = ArrayUtils.add(array1, (short) 0);
    assertArrayEquals(new short[] { 1, 2, 3, 0 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
    newArray = ArrayUtils.add(array1, (short) 4);
    assertArrayEquals(new short[] { 1, 2, 3, 4 }, newArray);
    assertEquals(Short.TYPE, newArray.getClass().getComponentType());
}

// Tests ArrayUtils.addAll for object arrays and every primitive overload:
// null operands, empty arrays, null elements, and concatenation order.
@Test
void testAddObjectArrayToObjectArray() {
    assertNull(ArrayUtils.addAll(null, (Object[]) null));
    Object[] newArray;
    final String[] stringArray1 = { "a", "b", "c" };
    final String[] stringArray2 = { "1", "2", "3" };
    // A null second operand yields a clone of the first (not the same reference).
    newArray = ArrayUtils.addAll(stringArray1, (String[]) null);
    assertNotSame(stringArray1, newArray);
    assertArrayEquals(stringArray1, newArray);
    assertArrayEquals(new String[] { "a", "b", "c" }, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    newArray = ArrayUtils.addAll(null, stringArray2);
    assertNotSame(stringArray2, newArray);
    assertArrayEquals(stringArray2, newArray);
    assertArrayEquals(new String[] { "1", "2", "3" }, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    newArray = ArrayUtils.addAll(stringArray1, stringArray2);
    assertArrayEquals(new String[] { "a", "b", "c", "1", "2", "3" }, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    newArray = ArrayUtils.addAll(ArrayUtils.EMPTY_STRING_ARRAY, (String[]) null);
    assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, newArray);
    assertArrayEquals(new String[] {}, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    newArray = ArrayUtils.addAll(null, ArrayUtils.EMPTY_STRING_ARRAY);
    assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, newArray);
    assertArrayEquals(new String[] {}, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    newArray = ArrayUtils.addAll(ArrayUtils.EMPTY_STRING_ARRAY, ArrayUtils.EMPTY_STRING_ARRAY);
    assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, newArray);
    assertArrayEquals(new String[] {}, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    // Null elements are preserved, not stripped.
    final String[] stringArrayNull = { null };
    newArray = ArrayUtils.addAll(stringArrayNull, stringArrayNull);
    assertArrayEquals(new String[] { null, null }, newArray);
    assertEquals(String.class, newArray.getClass().getComponentType());
    // boolean
    assertArrayEquals(new boolean[] { true, false, false, true }, ArrayUtils.addAll(new boolean[] { true, false }, false, true));
    assertArrayEquals(new boolean[] { false, true }, ArrayUtils.addAll(null, new boolean[] { false, true }));
    assertArrayEquals(new boolean[] { true, false }, ArrayUtils.addAll(new boolean[] { true, false }, null));
    // char
    assertArrayEquals(new char[] { 'a', 'b', 'c', 'd' }, ArrayUtils.addAll(new char[] { 'a', 'b' }, 'c', 'd'));
    assertArrayEquals(new char[] { 'c', 'd' }, ArrayUtils.addAll(null, new char[] { 'c', 'd' }));
    assertArrayEquals(new char[] { 'a', 'b' }, ArrayUtils.addAll(new char[] { 'a', 'b' }, null));
    // byte
    assertArrayEquals(new byte[] { (byte) 0, (byte) 1, (byte) 2, (byte) 3 }, ArrayUtils.addAll(new byte[] { (byte) 0, (byte) 1 }, (byte) 2, (byte) 3));
    assertArrayEquals(new byte[] { (byte) 2, (byte) 3 }, ArrayUtils.addAll(null, new byte[] { (byte) 2, (byte) 3 }));
    assertArrayEquals(new byte[] { (byte) 0, (byte) 1 }, ArrayUtils.addAll(new byte[] { (byte) 0, (byte) 1 }, null));
    // short
    assertArrayEquals(new short[] { (short) 10, (short) 20, (short) 30, (short) 40 }, ArrayUtils.addAll(new short[] { (short) 10, (short) 20 }, (short) 30, (short) 40));
    assertArrayEquals(new short[] { (short) 30, (short) 40 }, ArrayUtils.addAll(null, new short[] { (short) 30, (short) 40 }));
    assertArrayEquals(new short[] { (short) 10, (short) 20 }, ArrayUtils.addAll(new short[] { (short) 10, (short) 20 }, null));
    // int
    assertArrayEquals(new int[] { 1, 1000, -1000, -1 }, ArrayUtils.addAll(new int[] { 1, 1000 }, -1000, -1));
    assertArrayEquals(new int[] { -1000, -1 }, ArrayUtils.addAll(null, new int[] { -1000, -1 }));
    assertArrayEquals(new int[] { 1, 1000 }, ArrayUtils.addAll(new int[] { 1, 1000 }, null));
    // long
    assertArrayEquals(new long[] { 1L, -1L, 1000L, -1000L }, ArrayUtils.addAll(new long[] { 1L, -1L }, 1000L, -1000L));
    assertArrayEquals(new long[] { 1000L, -1000L }, ArrayUtils.addAll(null, new long[] { 1000L, -1000L }));
    assertArrayEquals(new long[] { 1L, -1L }, ArrayUtils.addAll(new long[] { 1L, -1L }, null));
    // float
    assertArrayEquals(new float[] { 10.5f, 10.1f, 1.6f, 0.01f }, ArrayUtils.addAll(new float[] { 10.5f, 10.1f }, 1.6f, 0.01f));
    assertArrayEquals(new float[] { 1.6f, 0.01f }, ArrayUtils.addAll(null, new float[] { 1.6f, 0.01f }));
    assertArrayEquals(new float[] { 10.5f, 10.1f }, ArrayUtils.addAll(new float[] { 10.5f, 10.1f }, null));
    // double
    assertArrayEquals(new double[] { Math.PI, -Math.PI, 0, 9.99 }, ArrayUtils.addAll(new double[] { Math.PI, -Math.PI }, 0, 9.99));
    assertArrayEquals(new double[] { 0, 9.99 }, ArrayUtils.addAll(null, new double[] { 0, 9.99 }));
    assertArrayEquals(new double[] { Math.PI, -Math.PI }, ArrayUtils.addAll(new double[] { Math.PI, -Math.PI }, null));
}

// Tests the deprecated ArrayUtils.add(array, index, element) overloads: insertion at
// head/middle/tail, null arrays, and IndexOutOfBoundsException messages for bad indices.
@SuppressWarnings("deprecation")
@Test
void testAddObjectAtIndex() {
    Object[] newArray;
    newArray = ArrayUtils.add((Object[]) null, 0, "a");
// (cont.) body of testAddObjectAtIndex, begun in the previous chunk.
assertArrayEquals(new String[] { "a" }, newArray);
assertArrayEquals(new Object[] { "a" }, newArray);
assertEquals(String.class, newArray.getClass().getComponentType());
final String[] stringArray1 = { "a", "b", "c" };
newArray = ArrayUtils.add(stringArray1, 0, null);
assertArrayEquals(new String[] { null, "a", "b", "c" }, newArray);
assertEquals(String.class, newArray.getClass().getComponentType());
newArray = ArrayUtils.add(stringArray1, 1, null);
assertArrayEquals(new String[] { "a", null, "b", "c" }, newArray);
assertEquals(String.class, newArray.getClass().getComponentType());
newArray = ArrayUtils.add(stringArray1, 3, null);
assertArrayEquals(new String[] { "a", "b", "c", null }, newArray);
assertEquals(String.class, newArray.getClass().getComponentType());
newArray = ArrayUtils.add(stringArray1, 3, "d");
assertArrayEquals(new String[] { "a", "b", "c", "d" }, newArray);
assertEquals(String.class, newArray.getClass().getComponentType());
// NOTE(review): the next assertion duplicates the previous line — looks like a
// copy-paste remnant; harmless, but could be removed.
assertEquals(String.class, newArray.getClass().getComponentType());
final Object[] o = { "1", "2", "4" };
final Object[] result = ArrayUtils.add(o, 2, "3");
final Object[] result2 = ArrayUtils.add(o, 3, "5");
assertNotNull(result);
assertEquals(4, result.length);
assertEquals("1", result[0]);
assertEquals("2", result[1]);
assertEquals("3", result[2]);
assertEquals("4", result[3]);
assertNotNull(result2);
assertEquals(4, result2.length);
assertEquals("1", result2[0]);
assertEquals("2", result2[1]);
assertEquals("4", result2[2]);
assertEquals("5", result2[3]);
// boolean tests
boolean[] booleanArray = ArrayUtils.add(null, 0, true);
assertArrayEquals(new boolean[] { true }, booleanArray);
IndexOutOfBoundsException e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(null, -1, true));
assertEquals("Index: -1, Length: 0", e.getMessage());
booleanArray = ArrayUtils.add(new boolean[] { true }, 0, false);
assertArrayEquals(new boolean[] { false, true }, booleanArray);
// (statement continues across the chunk boundary)
booleanArray = ArrayUtils.add(new boolean[] { false }, 1,
true);
assertArrayEquals(new boolean[] { false, true }, booleanArray);
booleanArray = ArrayUtils.add(new boolean[] { true, false }, 1, true);
assertArrayEquals(new boolean[] { true, true, false }, booleanArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new boolean[] { true, false }, 4, true));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new boolean[] { true, false }, -1, true));
assertEquals("Index: -1, Length: 2", e.getMessage());
// char tests
char[] charArray = ArrayUtils.add((char[]) null, 0, 'a');
assertArrayEquals(new char[] { 'a' }, charArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add((char[]) null, -1, 'a'));
assertEquals("Index: -1, Length: 0", e.getMessage());
charArray = ArrayUtils.add(new char[] { 'a' }, 0, 'b');
assertArrayEquals(new char[] { 'b', 'a' }, charArray);
charArray = ArrayUtils.add(new char[] { 'a', 'b' }, 0, 'c');
assertArrayEquals(new char[] { 'c', 'a', 'b' }, charArray);
charArray = ArrayUtils.add(new char[] { 'a', 'b' }, 1, 'k');
assertArrayEquals(new char[] { 'a', 'k', 'b' }, charArray);
charArray = ArrayUtils.add(new char[] { 'a', 'b', 'c' }, 1, 't');
assertArrayEquals(new char[] { 'a', 't', 'b', 'c' }, charArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new char[] { 'a', 'b' }, 4, 'c'));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new char[] { 'a', 'b' }, -1, 'c'));
assertEquals("Index: -1, Length: 2", e.getMessage());
// short tests
short[] shortArray = ArrayUtils.add(new short[] { 1 }, 0, (short) 2);
assertArrayEquals(new short[] { 2, 1 }, shortArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add((short[]) null, -1, (short) 2));
assertEquals("Index: -1, Length: 0", e.getMessage());
shortArray = ArrayUtils.add(new short[] { 2, 6 }, 2, (short) 10);
assertArrayEquals(new short[] { 2, 6, 10 }, shortArray);
// (array literal continues across the chunk boundary)
shortArray = ArrayUtils.add(new short[] {
2, 6 }, 0, (short) -4);
assertArrayEquals(new short[] { -4, 2, 6 }, shortArray);
shortArray = ArrayUtils.add(new short[] { 2, 6, 3 }, 2, (short) 1);
assertArrayEquals(new short[] { 2, 6, 1, 3 }, shortArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new short[] { 2, 6 }, 4, (short) 10));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new short[] { 2, 6 }, -1, (short) 10));
assertEquals("Index: -1, Length: 2", e.getMessage());
// byte tests
byte[] byteArray = ArrayUtils.add(new byte[] { 1 }, 0, (byte) 2);
assertArrayEquals(new byte[] { 2, 1 }, byteArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add((byte[]) null, -1, (byte) 2));
assertEquals("Index: -1, Length: 0", e.getMessage());
byteArray = ArrayUtils.add(new byte[] { 2, 6 }, 2, (byte) 3);
assertArrayEquals(new byte[] { 2, 6, 3 }, byteArray);
byteArray = ArrayUtils.add(new byte[] { 2, 6 }, 0, (byte) 1);
assertArrayEquals(new byte[] { 1, 2, 6 }, byteArray);
byteArray = ArrayUtils.add(new byte[] { 2, 6, 3 }, 2, (byte) 1);
assertArrayEquals(new byte[] { 2, 6, 1, 3 }, byteArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new byte[] { 2, 6 }, 4, (byte) 3));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new byte[] { 2, 6 }, -1, (byte) 3));
assertEquals("Index: -1, Length: 2", e.getMessage());
// int tests
int[] intArray = ArrayUtils.add(new int[] { 1 }, 0, 2);
assertArrayEquals(new int[] { 2, 1 }, intArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add((int[]) null, -1, 2));
assertEquals("Index: -1, Length: 0", e.getMessage());
intArray = ArrayUtils.add(new int[] { 2, 6 }, 2, 10);
assertArrayEquals(new int[] { 2, 6, 10 }, intArray);
intArray = ArrayUtils.add(new int[] { 2, 6 }, 0, -4);
assertArrayEquals(new int[] { -4, 2, 6 }, intArray);
intArray = ArrayUtils.add(new int[] { 2, 6, 3 }, 2, 1);
// (statement continues across the chunk boundary)
assertArrayEquals(new int[] { 2, 6, 1, 3 },
intArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new int[] { 2, 6 }, 4, 10));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new int[] { 2, 6 }, -1, 10));
assertEquals("Index: -1, Length: 2", e.getMessage());
// long tests
long[] longArray = ArrayUtils.add(new long[] { 1L }, 0, 2L);
assertArrayEquals(new long[] { 2L, 1L }, longArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add((long[]) null, -1, 2L));
assertEquals("Index: -1, Length: 0", e.getMessage());
longArray = ArrayUtils.add(new long[] { 2L, 6L }, 2, 10L);
assertArrayEquals(new long[] { 2L, 6L, 10L }, longArray);
longArray = ArrayUtils.add(new long[] { 2L, 6L }, 0, -4L);
assertArrayEquals(new long[] { -4L, 2L, 6L }, longArray);
longArray = ArrayUtils.add(new long[] { 2L, 6L, 3L }, 2, 1L);
assertArrayEquals(new long[] { 2L, 6L, 1L, 3L }, longArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new long[] { 2L, 6L }, 4, 10L));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new long[] { 2L, 6L }, -1, 10L));
assertEquals("Index: -1, Length: 2", e.getMessage());
// float tests
float[] floatArray = ArrayUtils.add(new float[] { 1.1f }, 0, 2.2f);
assertArrayEquals(new float[] { 2.2f, 1.1f }, floatArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add((float[]) null, -1, 2.2f));
assertEquals("Index: -1, Length: 0", e.getMessage());
floatArray = ArrayUtils.add(new float[] { 2.3f, 6.4f }, 2, 10.5f);
assertArrayEquals(new float[] { 2.3f, 6.4f, 10.5f }, floatArray);
floatArray = ArrayUtils.add(new float[] { 2.6f, 6.7f }, 0, -4.8f);
assertArrayEquals(new float[] { -4.8f, 2.6f, 6.7f }, floatArray);
floatArray = ArrayUtils.add(new float[] { 2.9f, 6.0f, 0.3f }, 2, 1.0f);
assertArrayEquals(new float[] { 2.9f, 6.0f, 1.0f, 0.3f }, floatArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new float[] { 2.3f, 6.4f }, 4, 10.5f));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new float[] { 2.3f, 6.4f }, -1, 10.5f));
assertEquals("Index: -1, Length: 2", e.getMessage());
// double tests
double[] doubleArray = ArrayUtils.add(new double[] { 1.1 }, 0, 2.2);
assertArrayEquals(new double[] { 2.2, 1.1 }, doubleArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(null, -1, 2.2));
assertEquals("Index: -1, Length: 0", e.getMessage());
doubleArray = ArrayUtils.add(new double[] { 2.3, 6.4 }, 2, 10.5);
assertArrayEquals(new double[] { 2.3, 6.4, 10.5 }, doubleArray);
doubleArray = ArrayUtils.add(new double[] { 2.6, 6.7 }, 0, -4.8);
assertArrayEquals(new double[] { -4.8, 2.6, 6.7 }, doubleArray);
doubleArray = ArrayUtils.add(new double[] { 2.9, 6.0, 0.3 }, 2, 1.0);
assertArrayEquals(new double[] { 2.9, 6.0, 1.0, 0.3 }, doubleArray);
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new double[] { 2.3, 6.4 }, 4, 10.5));
assertEquals("Index: 4, Length: 2", e.getMessage());
e = assertIndexOutOfBoundsException(() -> ArrayUtils.add(new double[] { 2.3, 6.4 }, -1, 10.5));
assertEquals("Index: -1, Length: 2", e.getMessage());
}

// Regression test for LANG-567: addAll may widen to a common supertype (Number[]),
// but storing a Long into an Integer[] must fail.
@Test
void testJira567() {
    final Number[] n;
    // Valid array construction
    n = ArrayUtils.addAll(new Number[] { Integer.valueOf(1) }, new Long[] { Long.valueOf(2) });
    assertEquals(2, n.length);
    assertEquals(Number.class, n.getClass().getComponentType());
    // Invalid - can't store Long in Integer array
    assertIllegalArgumentException(() -> ArrayUtils.addAll(new Integer[] { Integer.valueOf(1) }, new Long[] { Long.valueOf(2) }));
}

// Regression test for LANG-571: add(null, null) cannot infer a component type
// and must throw IllegalArgumentException rather than NPE.
@Test
@SuppressWarnings("deprecation")
void testLANG571() {
    final String[] stringArray = null;
    final String aString = null;
    assertIllegalArgumentException(() -> ArrayUtils.add(stringArray, aString));
    assertIllegalArgumentException(() -> ArrayUtils.add(stringArray, 0, aString));
}
}
apache/ignite
35,184
modules/core/src/main/java/org/apache/ignite/internal/jdbc2/JdbcConnection.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.jdbc2; import java.net.URL; import java.sql.Array; import java.sql.Blob; import java.sql.CallableStatement; import java.sql.Clob; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.NClob; import java.sql.PreparedStatement; import java.sql.SQLClientInfoException; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLWarning; import java.sql.SQLXML; import java.sql.Savepoint; import java.sql.Statement; import java.sql.Struct; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteClientDisconnectedException; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteJdbcDriver; import org.apache.ignite.cache.query.SqlFieldsQuery; 
import org.apache.ignite.cluster.ClusterGroup; import org.apache.ignite.compute.ComputeTaskTimeoutException; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.IgnitionEx; import org.apache.ignite.internal.processors.cache.DynamicCacheDescriptor; import org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode; import org.apache.ignite.internal.processors.odbc.SqlStateCode; import org.apache.ignite.internal.processors.query.GridQueryIndexing; import org.apache.ignite.internal.processors.query.IgniteSQLException; import org.apache.ignite.internal.processors.query.QueryUtils; import org.apache.ignite.internal.processors.resource.GridSpringResourceContext; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.lang.IgniteCallable; import org.apache.ignite.lang.IgniteProductVersion; import org.apache.ignite.resources.IgniteInstanceResource; import static java.sql.ResultSet.CONCUR_READ_ONLY; import static java.sql.ResultSet.HOLD_CURSORS_OVER_COMMIT; import static java.sql.ResultSet.TYPE_FORWARD_ONLY; import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.ignite.IgniteJdbcDriver.PROP_CACHE; import static org.apache.ignite.IgniteJdbcDriver.PROP_CFG; import static org.apache.ignite.IgniteJdbcDriver.PROP_COLLOCATED; import static org.apache.ignite.IgniteJdbcDriver.PROP_DISTRIBUTED_JOINS; import static org.apache.ignite.IgniteJdbcDriver.PROP_ENFORCE_JOIN_ORDER; import static org.apache.ignite.IgniteJdbcDriver.PROP_LAZY; import static org.apache.ignite.IgniteJdbcDriver.PROP_LOCAL; import static org.apache.ignite.IgniteJdbcDriver.PROP_MULTIPLE_STMTS; import static org.apache.ignite.IgniteJdbcDriver.PROP_NODE_ID; import 
static org.apache.ignite.IgniteJdbcDriver.PROP_SCHEMA; import static org.apache.ignite.IgniteJdbcDriver.PROP_SKIP_REDUCER_ON_UPDATE; import static org.apache.ignite.IgniteJdbcDriver.PROP_STREAMING; import static org.apache.ignite.IgniteJdbcDriver.PROP_STREAMING_ALLOW_OVERWRITE; import static org.apache.ignite.IgniteJdbcDriver.PROP_STREAMING_FLUSH_FREQ; import static org.apache.ignite.IgniteJdbcDriver.PROP_STREAMING_PER_NODE_BUF_SIZE; import static org.apache.ignite.IgniteJdbcDriver.PROP_STREAMING_PER_NODE_PAR_OPS; import static org.apache.ignite.IgniteJdbcDriver.PROP_TX_ALLOWED; import static org.apache.ignite.cache.query.SqlFieldsQuery.DFLT_LAZY; import static org.apache.ignite.internal.GridClosureCallMode.BALANCE; import static org.apache.ignite.internal.jdbc2.JdbcUtils.convertToSqlException; import static org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode.createJdbcSqlException; import static org.apache.ignite.internal.processors.task.TaskExecutionOptions.options; /** * JDBC connection implementation. */ public class JdbcConnection implements Connection { /** Null stub. */ private static final String NULL = "null"; /** Multiple statements supported since version. */ private static final IgniteProductVersion MULTIPLE_STATEMENTS_SUPPORTED_SINCE = IgniteProductVersion.fromString("2.4.0"); /** Multiple statements V2 task supported since version. */ private static final IgniteProductVersion MULTIPLE_STATEMENTS_TASK_V2_SUPPORTED_SINCE = IgniteProductVersion.fromString("2.8.0"); /** Close remote cursor task is supported since version. {@link JdbcCloseCursorTask}*/ private static final IgniteProductVersion CLOSE_CURSOR_TASK_SUPPORTED_SINCE = IgniteProductVersion.fromString("2.11.0"); /** Multiple statements V3 task supported since version. */ private static final IgniteProductVersion MULTIPLE_STATEMENTS_TASK_V3_SUPPORTED_SINCE = IgniteProductVersion.fromString("2.11.0"); /** * Ignite nodes cache. 
* * The key is result of concatenation of the following properties: * <ol> * <li>{@link IgniteJdbcDriver#PROP_CFG}</li> * </ol> */ private static final ConcurrentMap<String, IgniteNodeFuture> NODES = new ConcurrentHashMap<>(); /** Ignite ignite. */ private final Ignite ignite; /** Node key. */ private final String cfg; /** Cache name. */ private final String cacheName; /** Schema name. */ private String schemaName; /** Closed flag. */ private boolean closed; /** URL. */ private String url; /** Node ID. */ private UUID nodeId; /** Local query flag. */ private boolean locQry; /** Collocated query flag. */ private boolean collocatedQry; /** Distributed joins flag. */ private boolean distributedJoins; /** Enforced join order flag. */ private boolean enforceJoinOrder; /** Lazy query execution flag. */ private boolean lazy; /** Transactions allowed flag. */ private boolean txAllowed; /** Current transaction isolation. */ private int txIsolation; /** Make this connection streaming oriented, and prepared statements - data streamer aware. */ private final boolean stream; /** Auto flush frequency for streaming. */ private final long streamFlushTimeout; /** Node buffer size for data streamer. */ private final int streamNodeBufSize; /** Parallel ops count per node for data streamer. */ private final int streamNodeParOps; /** Allow overwrites for duplicate keys on streamed {@code INSERT}s. */ private final boolean streamAllowOverwrite; /** Allow queries with multiple statements. */ private final boolean multipleStmts; /** Skip reducer on update flag. */ private final boolean skipReducerOnUpdate; /** Statements. */ final Set<JdbcStatement> statements = new HashSet<>(); /** * Describes the client connection: * - thin cli: "cli:host:port@user_name" * - thin JDBC: "jdbc-thin:host:port@user_name" * - ODBC: "odbc:host:port@user_name" * * Used by the running query view to display query initiator. */ private final String clientDesc; /** * Creates new connection. 
* * @param url Connection URL. * @param props Additional properties. * @throws SQLException In case Ignite node failed to start. */ public JdbcConnection(String url, Properties props) throws SQLException { assert url != null; assert props != null; this.url = url; cacheName = props.getProperty(PROP_CACHE); locQry = Boolean.parseBoolean(props.getProperty(PROP_LOCAL)); collocatedQry = Boolean.parseBoolean(props.getProperty(PROP_COLLOCATED)); distributedJoins = Boolean.parseBoolean(props.getProperty(PROP_DISTRIBUTED_JOINS)); enforceJoinOrder = Boolean.parseBoolean(props.getProperty(PROP_ENFORCE_JOIN_ORDER)); lazy = Boolean.parseBoolean(props.getProperty(PROP_LAZY, String.valueOf(DFLT_LAZY))); txAllowed = Boolean.parseBoolean(props.getProperty(PROP_TX_ALLOWED)); stream = Boolean.parseBoolean(props.getProperty(PROP_STREAMING)); if (stream && cacheName == null) { throw new SQLException("Cache name cannot be null when streaming is enabled.", SqlStateCode.CLIENT_CONNECTION_FAILED); } streamAllowOverwrite = Boolean.parseBoolean(props.getProperty(PROP_STREAMING_ALLOW_OVERWRITE)); streamFlushTimeout = Long.parseLong(props.getProperty(PROP_STREAMING_FLUSH_FREQ, "0")); streamNodeBufSize = Integer.parseInt(props.getProperty(PROP_STREAMING_PER_NODE_BUF_SIZE, String.valueOf(IgniteDataStreamer.DFLT_PER_NODE_BUFFER_SIZE))); // If value is zero, server data-streamer pool size multiplied // by IgniteDataStreamer.DFLT_PARALLEL_OPS_MULTIPLIER will be used streamNodeParOps = Integer.parseInt(props.getProperty(PROP_STREAMING_PER_NODE_PAR_OPS, "0")); multipleStmts = Boolean.parseBoolean(props.getProperty(PROP_MULTIPLE_STMTS)); skipReducerOnUpdate = Boolean.parseBoolean(props.getProperty(PROP_SKIP_REDUCER_ON_UPDATE)); schemaName = QueryUtils.normalizeSchemaName(null, props.getProperty(PROP_SCHEMA)); String nodeIdProp = props.getProperty(PROP_NODE_ID); if (nodeIdProp != null) nodeId = UUID.fromString(nodeIdProp); try { String cfgUrl = props.getProperty(PROP_CFG); cfg = cfgUrl == null || 
cfgUrl.isEmpty() ? NULL : cfgUrl; ignite = getIgnite(cfg); if (!isValid(2)) { throw new SQLException("Client is invalid. Probably cache name is wrong.", SqlStateCode.CLIENT_CONNECTION_FAILED); } if (cacheName != null) { DynamicCacheDescriptor cacheDesc = ignite().context().cache().cacheDescriptor(cacheName); if (cacheDesc == null) { throw createJdbcSqlException("Cache doesn't exist: " + cacheName, IgniteQueryErrorCode.CACHE_NOT_FOUND); } if (schemaName == null) schemaName = QueryUtils.normalizeSchemaName(cacheName, cacheDesc.cacheConfiguration().getSqlSchema()); } else { if (schemaName == null) schemaName = QueryUtils.DFLT_SCHEMA; } clientDesc = "jdbc-v2:" + F.first(ignite.cluster().localNode().addresses()) + ":" + ignite.name(); } catch (Exception e) { close(); throw convertToSqlException(e, "Failed to start Ignite node. " + e.getMessage(), SqlStateCode.CLIENT_CONNECTION_FAILED); } } /** * @param cfgUrl Config url. * @return Ignite client node. * @throws IgniteCheckedException On error. */ private Ignite getIgnite(String cfgUrl) throws IgniteCheckedException { while (true) { IgniteNodeFuture fut = NODES.get(cfg); if (fut == null) { fut = new IgniteNodeFuture(); IgniteNodeFuture old = NODES.putIfAbsent(cfg, fut); if (old != null) fut = old; else { try { final IgniteBiTuple<IgniteConfiguration, ? 
extends GridSpringResourceContext> cfgAndCtx; String jdbcName = "ignite-jdbc-driver-" + UUID.randomUUID().toString(); if (NULL.equals(cfg)) { URL url = U.resolveIgniteUrl(IgnitionEx.DFLT_CFG); if (url != null) cfgAndCtx = loadConfiguration(IgnitionEx.DFLT_CFG, jdbcName); else { U.warn(null, "Default Spring XML file not found (is IGNITE_HOME set?): " + IgnitionEx.DFLT_CFG); IgniteConfiguration cfg = new IgniteConfiguration() .setIgniteInstanceName(jdbcName) .setClientMode(true); cfgAndCtx = new IgniteBiTuple<>(cfg, null); } } else cfgAndCtx = loadConfiguration(cfgUrl, jdbcName); fut.onDone(IgnitionEx.start(cfgAndCtx.get1(), cfgAndCtx.get2())); } catch (IgniteException e) { fut.onDone(e); } return fut.get(); } } if (fut.acquire()) return fut.get(); else NODES.remove(cfg, fut); } } /** * @param cfgUrl Config URL. * @param jdbcName Appended to instance name or used as default. * @return Ignite config and Spring context. */ private IgniteBiTuple<IgniteConfiguration, ? extends GridSpringResourceContext> loadConfiguration(String cfgUrl, String jdbcName) { try { IgniteBiTuple<Collection<IgniteConfiguration>, ? extends GridSpringResourceContext> cfgMap = IgnitionEx.loadConfigurations(cfgUrl); IgniteConfiguration cfg = F.first(cfgMap.get1()); if (cfg.getIgniteInstanceName() == null) cfg.setIgniteInstanceName(jdbcName); else cfg.setIgniteInstanceName(cfg.getIgniteInstanceName() + "-" + jdbcName); cfg.setClientMode(true); // Force client mode. 
return new IgniteBiTuple<>(cfg, cfgMap.getValue()); } catch (IgniteCheckedException e) { throw new IgniteException(e); } } /** {@inheritDoc} */ @Override public Statement createStatement() throws SQLException { return createStatement(TYPE_FORWARD_ONLY, CONCUR_READ_ONLY, HOLD_CURSORS_OVER_COMMIT); } /** {@inheritDoc} */ @Override public PreparedStatement prepareStatement(String sql) throws SQLException { ensureNotClosed(); return prepareStatement(sql, TYPE_FORWARD_ONLY, CONCUR_READ_ONLY, HOLD_CURSORS_OVER_COMMIT); } /** {@inheritDoc} */ @Override public CallableStatement prepareCall(String sql) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Callable functions are not supported."); } /** {@inheritDoc} */ @Override public String nativeSQL(String sql) throws SQLException { ensureNotClosed(); return sql; } /** {@inheritDoc} */ @Override public void setAutoCommit(boolean autoCommit) throws SQLException { ensureNotClosed(); if (!txAllowed && !autoCommit) throw new SQLFeatureNotSupportedException("Transactions are not supported."); } /** {@inheritDoc} */ @Override public boolean getAutoCommit() throws SQLException { ensureNotClosed(); return true; } /** {@inheritDoc} */ @Override public void commit() throws SQLException { ensureNotClosed(); if (!txAllowed) throw new SQLFeatureNotSupportedException("Transactions are not supported."); } /** {@inheritDoc} */ @Override public void rollback() throws SQLException { ensureNotClosed(); if (!txAllowed) throw new SQLFeatureNotSupportedException("Transactions are not supported."); } /** {@inheritDoc} */ @Override public void close() throws SQLException { if (closed) return; closed = true; for (Iterator<JdbcStatement> it = statements.iterator(); it.hasNext();) { JdbcStatement stmt = it.next(); stmt.closeInternal(); it.remove(); } IgniteNodeFuture fut = NODES.get(cfg); if (fut != null && fut.release()) { NODES.remove(cfg); if (ignite != null) ignite.close(); } } /** {@inheritDoc} */ @Override public 
boolean isClosed() throws SQLException { return closed; } /** {@inheritDoc} */ @Override public DatabaseMetaData getMetaData() throws SQLException { ensureNotClosed(); return new JdbcDatabaseMetadata(this); } /** {@inheritDoc} */ @Override public void setReadOnly(boolean readOnly) throws SQLException { ensureNotClosed(); } /** {@inheritDoc} */ @Override public boolean isReadOnly() throws SQLException { ensureNotClosed(); return true; } /** {@inheritDoc} */ @Override public void setCatalog(String catalog) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Catalogs are not supported."); } /** {@inheritDoc} */ @Override public String getCatalog() throws SQLException { ensureNotClosed(); return null; } /** {@inheritDoc} */ @Override public void setTransactionIsolation(int level) throws SQLException { ensureNotClosed(); if (txAllowed) txIsolation = level; else throw new SQLFeatureNotSupportedException("Transactions are not supported."); } /** {@inheritDoc} */ @Override public int getTransactionIsolation() throws SQLException { ensureNotClosed(); if (txAllowed) return txIsolation; else throw new SQLFeatureNotSupportedException("Transactions are not supported."); } /** {@inheritDoc} */ @Override public SQLWarning getWarnings() throws SQLException { ensureNotClosed(); return null; } /** {@inheritDoc} */ @Override public void clearWarnings() throws SQLException { ensureNotClosed(); } /** {@inheritDoc} */ @Override public Statement createStatement(int resSetType, int resSetConcurrency) throws SQLException { return createStatement(resSetType, resSetConcurrency, HOLD_CURSORS_OVER_COMMIT); } /** {@inheritDoc} */ @Override public PreparedStatement prepareStatement(String sql, int resSetType, int resSetConcurrency) throws SQLException { ensureNotClosed(); return prepareStatement(sql, resSetType, resSetConcurrency, HOLD_CURSORS_OVER_COMMIT); } /** {@inheritDoc} */ @Override public CallableStatement prepareCall(String sql, int resSetType, int 
resSetConcurrency) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Callable functions are not supported."); } /** {@inheritDoc} */ @Override public Map<String, Class<?>> getTypeMap() throws SQLException { throw new SQLFeatureNotSupportedException("Types mapping is not supported."); } /** {@inheritDoc} */ @Override public void setTypeMap(Map<String, Class<?>> map) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Types mapping is not supported."); } /** {@inheritDoc} */ @Override public void setHoldability(int holdability) throws SQLException { ensureNotClosed(); if (!txAllowed && holdability != HOLD_CURSORS_OVER_COMMIT) throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported)."); } /** {@inheritDoc} */ @Override public int getHoldability() throws SQLException { ensureNotClosed(); return HOLD_CURSORS_OVER_COMMIT; } /** {@inheritDoc} */ @Override public Savepoint setSavepoint() throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Savepoints are not supported."); } /** {@inheritDoc} */ @Override public Savepoint setSavepoint(String name) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Savepoints are not supported."); } /** {@inheritDoc} */ @Override public void rollback(Savepoint savepoint) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Savepoints are not supported."); } /** {@inheritDoc} */ @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Savepoints are not supported."); } /** {@inheritDoc} */ @Override public Statement createStatement(int resSetType, int resSetConcurrency, int resSetHoldability) throws SQLException { ensureNotClosed(); if (resSetType != TYPE_FORWARD_ONLY) throw new SQLFeatureNotSupportedException("Invalid result set type (only forward is 
supported.)"); if (resSetConcurrency != CONCUR_READ_ONLY) throw new SQLFeatureNotSupportedException("Invalid concurrency (updates are not supported)."); if (!txAllowed && resSetHoldability != HOLD_CURSORS_OVER_COMMIT) throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported)."); JdbcStatement stmt = new JdbcStatement(this); statements.add(stmt); return stmt; } /** {@inheritDoc} */ @Override public PreparedStatement prepareStatement(String sql, int resSetType, int resSetConcurrency, int resSetHoldability) throws SQLException { ensureNotClosed(); if (resSetType != TYPE_FORWARD_ONLY) throw new SQLFeatureNotSupportedException("Invalid result set type (only forward is supported.)"); if (resSetConcurrency != CONCUR_READ_ONLY) throw new SQLFeatureNotSupportedException("Invalid concurrency (updates are not supported)."); if (!txAllowed && resSetHoldability != HOLD_CURSORS_OVER_COMMIT) throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported)."); JdbcPreparedStatement stmt; if (!stream) stmt = new JdbcPreparedStatement(this, sql); else { GridQueryIndexing idx = ignite().context().query().getIndexing(); if (!idx.isStreamableInsertStatement(schemaName(), new SqlFieldsQuery(sql))) throw new IgniteSQLException("Streaming mode supports only INSERT commands without subqueries.", IgniteQueryErrorCode.UNSUPPORTED_OPERATION).toJdbcException(); IgniteDataStreamer streamer = ignite().dataStreamer(cacheName); streamer.autoFlushFrequency(streamFlushTimeout); streamer.allowOverwrite(streamAllowOverwrite); if (streamNodeBufSize > 0) streamer.perNodeBufferSize(streamNodeBufSize); if (streamNodeParOps > 0) streamer.perNodeParallelOperations(streamNodeParOps); stmt = new JdbcStreamedPreparedStatement(this, sql, streamer); } statements.add(stmt); return stmt; } /** {@inheritDoc} */ @Override public CallableStatement prepareCall(String sql, int resSetType, int resSetConcurrency, int resSetHoldability) throws 
SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Callable functions are not supported."); } /** {@inheritDoc} */ @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Updates are not supported."); } /** {@inheritDoc} */ @Override public PreparedStatement prepareStatement(String sql, int[] colIndexes) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Updates are not supported."); } /** {@inheritDoc} */ @Override public PreparedStatement prepareStatement(String sql, String[] colNames) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("Updates are not supported."); } /** {@inheritDoc} */ @Override public Clob createClob() throws SQLException { ensureNotClosed(); return new JdbcClob(""); } /** {@inheritDoc} */ @Override public Blob createBlob() throws SQLException { ensureNotClosed(); return new JdbcBlob(new byte[0]); } /** {@inheritDoc} */ @Override public NClob createNClob() throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("SQL-specific types are not supported."); } /** {@inheritDoc} */ @Override public SQLXML createSQLXML() throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("SQL-specific types are not supported."); } /** {@inheritDoc} */ @Override public boolean isValid(int timeout) throws SQLException { ensureNotClosed(); if (timeout < 0) throw new SQLException("Invalid timeout: " + timeout); try { JdbcConnectionValidationTask task = new JdbcConnectionValidationTask(cacheName, nodeId == null ? 
ignite : null); if (nodeId != null) { ClusterGroup grp = ignite.cluster().forServers().forNodeId(nodeId); if (grp.nodes().isEmpty()) throw new SQLException("Failed to establish connection with node (is it a server node?): " + nodeId); assert grp.nodes().size() == 1; return ((IgniteEx)ignite).context().closure().callAsync( BALANCE, task, options(grp.nodes()) ).get(timeout, SECONDS); } else return task.call(); } catch (IgniteClientDisconnectedException | ComputeTaskTimeoutException e) { throw new SQLException("Failed to establish connection.", SqlStateCode.CONNECTION_FAILURE, e); } catch (IgniteCheckedException | IgniteException ignored) { return false; } } /** {@inheritDoc} */ @Override public void setClientInfo(String name, String val) throws SQLClientInfoException { throw new UnsupportedOperationException("Client info is not supported."); } /** {@inheritDoc} */ @Override public void setClientInfo(Properties props) throws SQLClientInfoException { throw new UnsupportedOperationException("Client info is not supported."); } /** {@inheritDoc} */ @Override public String getClientInfo(String name) throws SQLException { ensureNotClosed(); return null; } /** {@inheritDoc} */ @Override public Properties getClientInfo() throws SQLException { ensureNotClosed(); return new Properties(); } /** {@inheritDoc} */ @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("SQL-specific types are not supported."); } /** {@inheritDoc} */ @Override public Struct createStruct(String typeName, Object[] attrs) throws SQLException { ensureNotClosed(); throw new SQLFeatureNotSupportedException("SQL-specific types are not supported."); } /** {@inheritDoc} */ @Override public <T> T unwrap(Class<T> iface) throws SQLException { if (!isWrapperFor(iface)) throw new SQLException("Connection is not a wrapper for " + iface.getName()); return (T)this; } /** {@inheritDoc} */ @Override public boolean 
isWrapperFor(Class<?> iface) throws SQLException { return iface != null && iface == Connection.class; } /** {@inheritDoc} */ @Override public void setSchema(String schemaName) throws SQLException { this.schemaName = JdbcUtils.normalizeSchema(schemaName); } /** {@inheritDoc} */ @Override public String getSchema() throws SQLException { return schemaName; } /** * @return Normalized schema name. */ public String schemaName() { return F.isEmpty(schemaName) ? QueryUtils.DFLT_SCHEMA : schemaName; } /** {@inheritDoc} */ @Override public void abort(Executor executor) throws SQLException { close(); } /** {@inheritDoc} */ @Override public void setNetworkTimeout(Executor executor, int ms) throws SQLException { throw new SQLFeatureNotSupportedException("Network timeout is not supported."); } /** {@inheritDoc} */ @Override public int getNetworkTimeout() throws SQLException { throw new SQLFeatureNotSupportedException("Network timeout is not supported."); } /** * @return Ignite node. */ IgniteKernal ignite() { return (IgniteKernal)ignite; } /** * @return Cache name. */ String cacheName() { return cacheName; } /** * @return URL. */ String url() { return url; } /** * @return Node ID. */ UUID nodeId() { return nodeId; } /** * @return {@code true} if target node has DML support, {@code false} otherwise. */ boolean isDmlSupported() { return ignite.version().greaterThanEqual(1, 8, 0); } /** * @return {@code true} if multiple statements allowed, {@code false} otherwise. */ boolean isMultipleStatementsAllowed() { return multipleStmts; } /** * @return {@code true} if multiple statements allowed, {@code false} otherwise. */ boolean isMultipleStatementsSupported() { return U.isOldestNodeVersionAtLeast(MULTIPLE_STATEMENTS_SUPPORTED_SINCE, ignite.cluster().nodes()); } /** * @return {@code true} if multiple statements allowed, {@code false} otherwise. 
*/ boolean isMultipleStatementsTaskV2Supported() { return U.isOldestNodeVersionAtLeast(MULTIPLE_STATEMENTS_TASK_V2_SUPPORTED_SINCE, ignite.cluster().nodes()); } /** * @return {@code true} if close remote cursor is supported. */ boolean isCloseCursorTaskSupported() { return U.isOldestNodeVersionAtLeast(CLOSE_CURSOR_TASK_SUPPORTED_SINCE, ignite.cluster().nodes()); } /** * @return {@code true} if multiple statements allowed, {@code false} otherwise. */ boolean isMultipleStatementsTaskV3Supported() { return U.isOldestNodeVersionAtLeast(MULTIPLE_STATEMENTS_TASK_V3_SUPPORTED_SINCE, ignite.cluster().nodes()); } /** * @return {@code true} if update on server is enabled, {@code false} otherwise. */ boolean skipReducerOnUpdate() { return skipReducerOnUpdate; } /** * @return Local query flag. */ boolean isLocalQuery() { return locQry; } /** * @return Collocated query flag. */ boolean isCollocatedQuery() { return collocatedQry; } /** * @return Distributed joins flag. */ boolean isDistributedJoins() { return distributedJoins; } /** * @return Enforce join order flag. */ boolean isEnforceJoinOrder() { return enforceJoinOrder; } /** * @return Lazy query execution flag. */ boolean isLazy() { return lazy; } /** * Ensures that connection is not closed. * * @throws SQLException If connection is closed. */ void ensureNotClosed() throws SQLException { if (closed) throw new SQLException("Connection is closed.", SqlStateCode.CONNECTION_CLOSED); } /** * @return Internal statement. * @throws SQLException In case of error. */ JdbcStatement createStatement0() throws SQLException { return (JdbcStatement)createStatement(); } /** * Describes the client connection: * - thin cli: "cli:host:port@user_name" * - thin JDBC: "jdbc-thin:host:port@user_name" * - ODBC: "odbc:host:port@user_name" * * Used by the running query view to display query initiator. * * @return Client descriptor string. */ String clientDescriptor() { return clientDesc; } /** * JDBC connection validation task. 
*/ private static class JdbcConnectionValidationTask implements IgniteCallable<Boolean> { /** Serial version uid. */ private static final long serialVersionUID = 0L; /** Cache name. */ private final String cacheName; /** Ignite. */ @IgniteInstanceResource private Ignite ignite; /** * @param cacheName Cache name. * @param ignite Ignite instance. */ public JdbcConnectionValidationTask(String cacheName, Ignite ignite) { this.cacheName = cacheName; this.ignite = ignite; } /** {@inheritDoc} */ @Override public Boolean call() { return cacheName == null || ignite.cache(cacheName) != null; } } /** * */ private static class IgniteNodeFuture extends GridFutureAdapter<Ignite> { /** Reference count. */ private final AtomicInteger refCnt = new AtomicInteger(1); /** * */ public boolean acquire() { while (true) { int cur = refCnt.get(); if (cur == 0) return false; if (refCnt.compareAndSet(cur, cur + 1)) return true; } } /** * */ public boolean release() { while (true) { int cur = refCnt.get(); assert cur > 0; if (refCnt.compareAndSet(cur, cur - 1)) // CASed to 0. return cur == 1; } } } }
googleapis/google-cloud-java
35,230
java-talent/proto-google-cloud-talent-v4/src/main/java/com/google/cloud/talent/v4/UpdateJobRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/talent/v4/job_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.talent.v4; /** * * * <pre> * Update job request. * </pre> * * Protobuf type {@code google.cloud.talent.v4.UpdateJobRequest} */ public final class UpdateJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.talent.v4.UpdateJobRequest) UpdateJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateJobRequest.newBuilder() to construct. 
private UpdateJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateJobRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4.JobServiceProto .internal_static_google_cloud_talent_v4_UpdateJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4.JobServiceProto .internal_static_google_cloud_talent_v4_UpdateJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4.UpdateJobRequest.class, com.google.cloud.talent.v4.UpdateJobRequest.Builder.class); } private int bitField0_; public static final int JOB_FIELD_NUMBER = 1; private com.google.cloud.talent.v4.Job job_; /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the job field is set. */ @java.lang.Override public boolean hasJob() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The job. */ @java.lang.Override public com.google.cloud.talent.v4.Job getJob() { return job_ == null ? com.google.cloud.talent.v4.Job.getDefaultInstance() : job_; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.cloud.talent.v4.JobOrBuilder getJobOrBuilder() { return job_ == null ? 
com.google.cloud.talent.v4.Job.getDefaultInstance() : job_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getJob()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getJob()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.talent.v4.UpdateJobRequest)) { return super.equals(obj); } com.google.cloud.talent.v4.UpdateJobRequest other = (com.google.cloud.talent.v4.UpdateJobRequest) obj; if (hasJob() != other.hasJob()) return false; if (hasJob()) { if (!getJob().equals(other.getJob())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasJob()) { hash = (37 * hash) + JOB_FIELD_NUMBER; hash = (53 * hash) + getJob().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } 
public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateJobRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.talent.v4.UpdateJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4.UpdateJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.talent.v4.UpdateJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Update job request. * </pre> * * Protobuf type {@code google.cloud.talent.v4.UpdateJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4.UpdateJobRequest) com.google.cloud.talent.v4.UpdateJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4.JobServiceProto .internal_static_google_cloud_talent_v4_UpdateJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4.JobServiceProto .internal_static_google_cloud_talent_v4_UpdateJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4.UpdateJobRequest.class, com.google.cloud.talent.v4.UpdateJobRequest.Builder.class); } // Construct using com.google.cloud.talent.v4.UpdateJobRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getJobFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.cloud.talent.v4.JobServiceProto .internal_static_google_cloud_talent_v4_UpdateJobRequest_descriptor; } @java.lang.Override public com.google.cloud.talent.v4.UpdateJobRequest getDefaultInstanceForType() { return com.google.cloud.talent.v4.UpdateJobRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.talent.v4.UpdateJobRequest build() { com.google.cloud.talent.v4.UpdateJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.talent.v4.UpdateJobRequest buildPartial() { com.google.cloud.talent.v4.UpdateJobRequest result = new com.google.cloud.talent.v4.UpdateJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.talent.v4.UpdateJobRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.job_ = jobBuilder_ == null ? job_ : jobBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.talent.v4.UpdateJobRequest) { return mergeFrom((com.google.cloud.talent.v4.UpdateJobRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.talent.v4.UpdateJobRequest other) { if (other == com.google.cloud.talent.v4.UpdateJobRequest.getDefaultInstance()) return this; if (other.hasJob()) { mergeJob(other.getJob()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = 
false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getJobFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.talent.v4.Job job_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4.Job, com.google.cloud.talent.v4.Job.Builder, com.google.cloud.talent.v4.JobOrBuilder> jobBuilder_; /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the job field is set. */ public boolean hasJob() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The job. */ public com.google.cloud.talent.v4.Job getJob() { if (jobBuilder_ == null) { return job_ == null ? com.google.cloud.talent.v4.Job.getDefaultInstance() : job_; } else { return jobBuilder_.getMessage(); } } /** * * * <pre> * Required. The Job to be updated. 
* </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob(com.google.cloud.talent.v4.Job value) { if (jobBuilder_ == null) { if (value == null) { throw new NullPointerException(); } job_ = value; } else { jobBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob(com.google.cloud.talent.v4.Job.Builder builderForValue) { if (jobBuilder_ == null) { job_ = builderForValue.build(); } else { jobBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeJob(com.google.cloud.talent.v4.Job value) { if (jobBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && job_ != null && job_ != com.google.cloud.talent.v4.Job.getDefaultInstance()) { getJobBuilder().mergeFrom(value); } else { job_ = value; } } else { jobBuilder_.mergeFrom(value); } if (job_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearJob() { bitField0_ = (bitField0_ & ~0x00000001); job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.cloud.talent.v4.Job.Builder getJobBuilder() { bitField0_ |= 0x00000001; onChanged(); return getJobFieldBuilder().getBuilder(); } /** * * * <pre> * Required. 
The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.cloud.talent.v4.JobOrBuilder getJobOrBuilder() { if (jobBuilder_ != null) { return jobBuilder_.getMessageOrBuilder(); } else { return job_ == null ? com.google.cloud.talent.v4.Job.getDefaultInstance() : job_; } } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4.Job job = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4.Job, com.google.cloud.talent.v4.Job.Builder, com.google.cloud.talent.v4.JobOrBuilder> getJobFieldBuilder() { if (jobBuilder_ == null) { jobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4.Job, com.google.cloud.talent.v4.Job.Builder, com.google.cloud.talent.v4.JobOrBuilder>( getJob(), getParentForChildren(), isClean()); job_ = null; } return jobBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Strongly recommended for the best service experience. 
* * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateJobRequest.update_mask] is * provided, only the specified fields in * [job][google.cloud.talent.v4.UpdateJobRequest.job] are updated. 
Otherwise * all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4.UpdateJobRequest) } // @@protoc_insertion_point(class_scope:google.cloud.talent.v4.UpdateJobRequest) private static final com.google.cloud.talent.v4.UpdateJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.talent.v4.UpdateJobRequest(); } public static com.google.cloud.talent.v4.UpdateJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateJobRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateJobRequest>() { @java.lang.Override public UpdateJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.talent.v4.UpdateJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop
35,402
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.client.api; import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Set; import java.util.function.Supplier; import java.util.List; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerUpdateType; import org.apache.hadoop.yarn.api.records.ExecutionType; import org.apache.hadoop.yarn.api.records.ExecutionTypeRequest; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import 
org.apache.hadoop.yarn.api.records.SchedulingRequest; import org.apache.hadoop.yarn.api.records.UpdateContainerRequest; import org.apache.hadoop.yarn.api.resource.PlacementConstraint; import org.apache.hadoop.yarn.client.api.impl.AMRMClientImpl; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.resource.Resources; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @InterfaceAudience.Public @InterfaceStability.Stable public abstract class AMRMClient<T extends AMRMClient.ContainerRequest> extends AbstractService { private static final Logger LOG = LoggerFactory.getLogger(AMRMClient.class); private TimelineV2Client timelineV2Client; /** * Create a new instance of AMRMClient. * For usage: * <pre> * {@code * AMRMClient.<T>createAMRMClientContainerRequest() * }</pre> * @return the newly create AMRMClient instance. */ @Public public static <T extends ContainerRequest> AMRMClient<T> createAMRMClient() { AMRMClient<T> client = new AMRMClientImpl<T>(); return client; } private NMTokenCache nmTokenCache; @Private protected AMRMClient(String name) { super(name); nmTokenCache = NMTokenCache.getSingleton(); } /** * Object to represent a single container request for resources. Scheduler * documentation should be consulted for the specifics of how the parameters * are honored. * * By default, YARN schedulers try to allocate containers at the requested * locations but they may relax the constraints in order to expedite meeting * allocations limits. They first relax the constraint to the same rack as the * requested node and then to anywhere in the cluster. The relaxLocality flag * may be used to disable locality relaxation and request containers at only * specific locations. The following conditions apply. * <ul> * <li>Within a priority, all container requests must have the same value for * locality relaxation. 
Either enabled or disabled.</li> * <li>If locality relaxation is disabled, then across requests, locations at * different network levels may not be specified. E.g. its invalid to make a * request for a specific node and another request for a specific rack.</li> * <li>If locality relaxation is disabled, then only within the same request, * a node and its rack may be specified together. This allows for a specific * rack with a preference for a specific node within that rack.</li> * <li></li> * </ul> * To re-enable locality relaxation at a given priority, all pending requests * with locality relaxation disabled must be first removed. Then they can be * added back with locality relaxation enabled. * * All getters return immutable values. */ public static class ContainerRequest { private Resource capability; private List<String> nodes; private List<String> racks; private Priority priority; private long allocationRequestId; private boolean relaxLocality = true; private String nodeLabelsExpression; private ExecutionTypeRequest executionTypeRequest = ExecutionTypeRequest.newInstance(); private String resourceProfile = null; /** * Instantiates a {@link ContainerRequest} with the given constraints and * locality relaxation enabled. * * @param capability * The {@link Resource} to be requested for each container. * @param nodes * Any hosts to request that the containers are placed on. * @param racks * Any racks to request that the containers are placed on. The * racks corresponding to any hosts requested will be automatically * added to this list. * @param priority * The priority at which to request the containers. Higher * priorities have lower numerical values. 
*/ public ContainerRequest(Resource capability, String[] nodes, String[] racks, Priority priority) { this(capability, nodes, racks, priority, true, null); } @VisibleForTesting public ContainerRequest(Resource capability, String[] nodes, String[] racks, Priority priority, String profile) { this(capability, nodes, racks, priority, 0, true, null, ExecutionTypeRequest.newInstance(), profile); } /** * Instantiates a {@link ContainerRequest} with the given constraints and * locality relaxation enabled. * * @param capability * The {@link Resource} to be requested for each container. * @param nodes * Any hosts to request that the containers are placed on. * @param racks * Any racks to request that the containers are placed on. The * racks corresponding to any hosts requested will be automatically * added to this list. * @param priority * The priority at which to request the containers. Higher * priorities have lower numerical values. * @param allocationRequestId Allocation Request Id */ @Public @InterfaceStability.Evolving public ContainerRequest(Resource capability, String[] nodes, String[] racks, Priority priority, long allocationRequestId) { this(capability, nodes, racks, priority, allocationRequestId, true, null, ExecutionTypeRequest.newInstance()); } /** * Instantiates a {@link ContainerRequest} with the given constraints. * * @param capability * The {@link Resource} to be requested for each container. * @param nodes * Any hosts to request that the containers are placed on. * @param racks * Any racks to request that the containers are placed on. The * racks corresponding to any hosts requested will be automatically * added to this list. * @param priority * The priority at which to request the containers. Higher * priorities have lower numerical values. * @param relaxLocality * If true, containers for this request may be assigned on hosts * and racks other than the ones explicitly requested. 
     */
    public ContainerRequest(Resource capability, String[] nodes,
        String[] racks, Priority priority, boolean relaxLocality) {
      // Delegates with a null node-label expression.
      this(capability, nodes, racks, priority, relaxLocality, null);
    }

    /**
     * Instantiates a {@link ContainerRequest} with the given constraints.
     *
     * @param capability
     *          The {@link Resource} to be requested for each container.
     * @param nodes
     *          Any hosts to request that the containers are placed on.
     * @param racks
     *          Any racks to request that the containers are placed on. The
     *          racks corresponding to any hosts requested will be automatically
     *          added to this list.
     * @param priority
     *          The priority at which to request the containers. Higher
     *          priorities have lower numerical values.
     * @param relaxLocality
     *          If true, containers for this request may be assigned on hosts
     *          and racks other than the ones explicitly requested.
     * @param allocationRequestId Allocation Request Id
     */
    @Public
    @InterfaceStability.Evolving
    public ContainerRequest(Resource capability, String[] nodes,
        String[] racks, Priority priority, long allocationRequestId,
        boolean relaxLocality) {
      // Delegates with a null node-label expression and the default
      // (GUARANTEED) execution type.
      this(capability, nodes, racks, priority, allocationRequestId,
          relaxLocality, null, ExecutionTypeRequest.newInstance());
    }

    /**
     * Instantiates a {@link ContainerRequest} with the given constraints.
     *
     * @param capability
     *          The {@link Resource} to be requested for each container.
     * @param nodes
     *          Any hosts to request that the containers are placed on.
     * @param racks
     *          Any racks to request that the containers are placed on. The
     *          racks corresponding to any hosts requested will be automatically
     *          added to this list.
     * @param priority
     *          The priority at which to request the containers. Higher
     *          priorities have lower numerical values.
     * @param relaxLocality
     *          If true, containers for this request may be assigned on hosts
     *          and racks other than the ones explicitly requested.
     * @param nodeLabelsExpression
     *          Set node labels to allocate resource, now we only support
     *          asking for only a single node label
     */
    public ContainerRequest(Resource capability, String[] nodes,
        String[] racks, Priority priority, boolean relaxLocality,
        String nodeLabelsExpression) {
      // allocationRequestId defaults to 0 for this legacy overload.
      this(capability, nodes, racks, priority, 0, relaxLocality,
          nodeLabelsExpression, ExecutionTypeRequest.newInstance());
    }

    /**
     * Instantiates a {@link ContainerRequest} with the given constraints.
     *
     * @param capability
     *          The {@link Resource} to be requested for each container.
     * @param nodes
     *          Any hosts to request that the containers are placed on.
     * @param racks
     *          Any racks to request that the containers are placed on. The
     *          racks corresponding to any hosts requested will be automatically
     *          added to this list.
     * @param priority
     *          The priority at which to request the containers. Higher
     *          priorities have lower numerical values.
     * @param allocationRequestId
     *          The allocationRequestId of the request. To be used as a tracking
     *          id to match Containers allocated against this request. Will
     *          default to 0 if not specified.
     * @param relaxLocality
     *          If true, containers for this request may be assigned on hosts
     *          and racks other than the ones explicitly requested.
     * @param nodeLabelsExpression
     *          Set node labels to allocate resource, now we only support
     *          asking for only a single node label
     */
    @Public
    @InterfaceStability.Evolving
    public ContainerRequest(Resource capability, String[] nodes,
        String[] racks, Priority priority, long allocationRequestId,
        boolean relaxLocality, String nodeLabelsExpression) {
      this(capability, nodes, racks, priority, allocationRequestId,
          relaxLocality, nodeLabelsExpression,
          ExecutionTypeRequest.newInstance());
    }

    /**
     * Instantiates a {@link ContainerRequest} with the given constraints.
     *
     * @param capability
     *          The {@link Resource} to be requested for each container.
     * @param nodes
     *          Any hosts to request that the containers are placed on.
     * @param racks
     *          Any racks to request that the containers are placed on. The
     *          racks corresponding to any hosts requested will be automatically
     *          added to this list.
     * @param priority
     *          The priority at which to request the containers. Higher
     *          priorities have lower numerical values.
     * @param allocationRequestId
     *          The allocationRequestId of the request. To be used as a tracking
     *          id to match Containers allocated against this request. Will
     *          default to 0 if not specified.
     * @param relaxLocality
     *          If true, containers for this request may be assigned on hosts
     *          and racks other than the ones explicitly requested.
     * @param nodeLabelsExpression
     *          Set node labels to allocate resource, now we only support
     *          asking for only a single node label
     * @param executionTypeRequest
     *          Set the execution type of the container request.
     */
    public ContainerRequest(Resource capability, String[] nodes,
        String[] racks, Priority priority, long allocationRequestId,
        boolean relaxLocality, String nodeLabelsExpression,
        ExecutionTypeRequest executionTypeRequest) {
      // Delegates with a null resource profile.
      this(capability, nodes, racks, priority, allocationRequestId,
          relaxLocality, nodeLabelsExpression, executionTypeRequest,
          null);
    }

    /**
     * Instantiates a {@link ContainerRequest} with the given constraints.
     * This is the designated constructor: all other overloads funnel here.
     *
     * @param capability
     *          The {@link Resource} to be requested for each container.
     * @param nodes
     *          Any hosts to request that the containers are placed on.
     * @param racks
     *          Any racks to request that the containers are placed on. The
     *          racks corresponding to any hosts requested will be automatically
     *          added to this list.
     * @param priority
     *          The priority at which to request the containers. Higher
     *          priorities have lower numerical values.
     * @param allocationRequestId
     *          The allocationRequestId of the request. To be used as a tracking
     *          id to match Containers allocated against this request. Will
     *          default to 0 if not specified.
     * @param relaxLocality
     *          If true, containers for this request may be assigned on hosts
     *          and racks other than the ones explicitly requested.
     * @param nodeLabelsExpression
     *          Set node labels to allocate resource, now we only support
     *          asking for only a single node label
     * @param executionTypeRequest
     *          Set the execution type of the container request.
     * @param profile
     *          Set the resource profile for the container request
     */
    public ContainerRequest(Resource capability, String[] nodes,
        String[] racks, Priority priority, long allocationRequestId,
        boolean relaxLocality, String nodeLabelsExpression,
        ExecutionTypeRequest executionTypeRequest, String profile) {
      this.allocationRequestId = allocationRequestId;
      this.capability = capability;
      // Defensive immutable copies of the caller-supplied arrays; null stays
      // null (meaning "no constraint"), it is NOT normalized to an empty list.
      this.nodes = (nodes != null ? ImmutableList.copyOf(nodes) : null);
      this.racks = (racks != null ? ImmutableList.copyOf(racks) : null);
      this.priority = priority;
      this.relaxLocality = relaxLocality;
      this.nodeLabelsExpression = nodeLabelsExpression;
      this.executionTypeRequest = executionTypeRequest;
      this.resourceProfile = profile;
      sanityCheck();
    }

    // Validate request: capability and priority are mandatory, and locality
    // relaxation may only be disabled when at least one node or rack
    // constraint is present.
    private void sanityCheck() {
      Preconditions.checkArgument(capability != null,
          "The Resource to be requested for each container " +
              "should not be null ");
      Preconditions.checkArgument(priority != null,
          "The priority at which to request containers should not be null ");
      Preconditions.checkArgument(
          !(!relaxLocality && (racks == null || racks.size() == 0)
              && (nodes == null || nodes.size() == 0)),
          "Can't turn off locality relaxation on a " +
              "request with no location constraints");
    }

    // No-arg constructor used only by ContainerRequestBuilder; skips
    // sanityCheck(), which the builder runs in build() instead.
    private ContainerRequest() {};

    public Resource getCapability() {
      return capability;
    }

    public List<String> getNodes() {
      return nodes;
    }

    public List<String> getRacks() {
      return racks;
    }

    public Priority getPriority() {
      return priority;
    }

    public long getAllocationRequestId() {
      return allocationRequestId;
    }

    public boolean getRelaxLocality() {
      return relaxLocality;
    }

    public String getNodeLabelExpression() {
      return nodeLabelsExpression;
    }

    public ExecutionTypeRequest getExecutionTypeRequest() {
      return executionTypeRequest;
    }

    public String getResourceProfile() {
      return resourceProfile;
    }

    // Note: nodes, racks, relaxLocality and nodeLabelsExpression are not
    // included in the string form.
    public String toString() {
      StringBuilder sb = new StringBuilder();
      sb.append("Capability[").append(capability).append("]")
          .append("Priority[").append(priority).append("]")
          .append("AllocationRequestId[").append(allocationRequestId)
          .append("]")
          .append("ExecutionTypeRequest[").append(executionTypeRequest)
          .append("]")
          .append("Resource Profile[").append(resourceProfile).append("]");
      return sb.toString();
    }

    public static ContainerRequestBuilder newBuilder() {
      return new ContainerRequestBuilder();
    }

    /**
     * Class to construct instances of {@link ContainerRequest} with specific
     * options.
     */
    public static final class ContainerRequestBuilder {
      // The instance under construction; mutated in place by the setters and
      // handed out directly by build().
      private ContainerRequest containerRequest = new ContainerRequest();

      public ContainerRequestBuilder capability(Resource capability) {
        containerRequest.capability = capability;
        return this;
      }

      public ContainerRequestBuilder nodes(String[] nodes) {
        // Same copy-or-null convention as the designated constructor.
        containerRequest.nodes =
            (nodes != null ? ImmutableList.copyOf(nodes): null);
        return this;
      }

      public ContainerRequestBuilder racks(String[] racks) {
        containerRequest.racks =
            (racks != null ? ImmutableList.copyOf(racks) : null);
        return this;
      }

      public ContainerRequestBuilder priority(Priority priority) {
        containerRequest.priority = priority;
        return this;
      }

      public ContainerRequestBuilder allocationRequestId(
          long allocationRequestId) {
        containerRequest.allocationRequestId = allocationRequestId;
        return this;
      }

      public ContainerRequestBuilder relaxLocality(boolean relaxLocality) {
        containerRequest.relaxLocality = relaxLocality;
        return this;
      }

      public ContainerRequestBuilder nodeLabelsExpression(
          String nodeLabelsExpression) {
        containerRequest.nodeLabelsExpression = nodeLabelsExpression;
        return this;
      }

      public ContainerRequestBuilder executionTypeRequest(
          ExecutionTypeRequest executionTypeRequest) {
        containerRequest.executionTypeRequest = executionTypeRequest;
        return this;
      }

      public ContainerRequestBuilder resourceProfile(String resourceProfile) {
        containerRequest.resourceProfile = resourceProfile;
        return this;
      }

      // Validates and returns the internal instance. Note it returns the one
      // shared instance, not a copy: reusing this builder after build() would
      // mutate the already-built request.
      public ContainerRequest build() {
        containerRequest.sanityCheck();
        return containerRequest;
      }
    }
  }

  /**
   * Add a Collection of SchedulingRequests. The AMRMClient will ensure that
   * all requests in the same batch are sent in the same allocate call.
   * @param schedulingRequests Collection of Scheduling Requests.
   */
  @Public
  @InterfaceStability.Unstable
  public void addSchedulingRequests(
      Collection<SchedulingRequest> schedulingRequests) {
    // Default no-op; sub-classes that support SchedulingRequests override.
  }

  /**
   * Register the application master. This must be called before any
   * other interaction
   * @param appHostName Name of the host on which master is running
   * @param appHostPort Port master is listening on
   * @param appTrackingUrl URL at which the master info can be seen
   * @return <code>RegisterApplicationMasterResponse</code>
   * @throws YarnException
   * @throws IOException
   */
  public abstract RegisterApplicationMasterResponse
      registerApplicationMaster(String appHostName, int appHostPort,
          String appTrackingUrl) throws YarnException, IOException;

  /**
   * Register the application master. This must be called before any
   * other interaction
   * @param appHostName Name of the host on which master is running
   * @param appHostPort Port master is listening on
   * @param appTrackingUrl URL at which the master info can be seen
   * @param placementConstraints Placement Constraints mappings.
   * @return <code>RegisterApplicationMasterResponse</code>
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @InterfaceStability.Unstable
  public RegisterApplicationMasterResponse registerApplicationMaster(
      String appHostName, int appHostPort, String appTrackingUrl,
      Map<Set<String>, PlacementConstraint> placementConstraints)
      throws YarnException, IOException {
    // Placement constraints are opt-in; sub-classes that support them
    // override this method.
    throw new YarnException("Not supported");
  }

  /**
   * Request additional containers and receive new container allocations.
   * Requests made via <code>addContainerRequest</code> are sent to the
   * <code>ResourceManager</code>. New containers assigned to the master are
   * retrieved. Status of completed containers and node health updates are also
   * retrieved. This also doubles up as a heartbeat to the ResourceManager and
   * must be made periodically. The call may not always return any new
   * allocations of containers. App should not make concurrent allocate
   * requests. May cause request loss.
   *
   * <p>
   * Note : If the user has not removed container requests that have already
   * been satisfied, then the re-register may end up sending the entire
   * container requests to the RM (including matched requests). Which would mean
   * the RM could end up giving it a lot of new allocated containers.
   * </p>
   *
   * @param progressIndicator Indicates progress made by the master
   * @return the response of the allocate request
   * @throws YarnException
   * @throws IOException
   */
  public abstract AllocateResponse allocate(float progressIndicator)
      throws YarnException, IOException;

  /**
   * Unregister the application master. This must be called in the end.
   * @param appStatus Success/Failure status of the master
   * @param appMessage Diagnostics message on failure
   * @param appTrackingUrl New URL to get master info
   * @throws YarnException
   * @throws IOException
   */
  public abstract void unregisterApplicationMaster(
      FinalApplicationStatus appStatus, String appMessage,
      String appTrackingUrl) throws YarnException, IOException;

  /**
   * Request containers for resources before calling <code>allocate</code>
   * @param req Resource request
   */
  public abstract void addContainerRequest(T req);

  /**
   * Remove previous container request. The previous container request may have
   * already been sent to the ResourceManager. So even after the remove request
   * the app must be prepared to receive an allocation for the previous request
   * even after the remove request
   * @param req Resource request
   */
  public abstract void removeContainerRequest(T req);

  /**
   * Request container resource change before calling <code>allocate</code>.
   * Any previous pending resource change request of the same container will be
   * removed.
   *
   * Application that calls this method is expected to maintain the
   * <code>Container</code>s that are returned from previous successful
   * allocations or resource changes. By passing in the existing container and a
   * target resource capability to this method, the application requests the
   * ResourceManager to change the existing resource allocation to the target
   * resource allocation.
   *
   * @deprecated use
   * {@link #requestContainerUpdate(Container, UpdateContainerRequest)}
   *
   * @param container The container returned from the last successful resource
   *                  allocation or resource change
   * @param capability The target resource capability of the container
   */
  @Deprecated
  public void requestContainerResourceChange(
      Container container, Resource capability) {
    Preconditions.checkNotNull(container, "Container cannot be null!!");
    Preconditions.checkNotNull(capability,
        "UpdateContainerRequest cannot be null!!");
    // If the target capability fits within the container's current resource
    // this is a decrease, otherwise an increase.
    requestContainerUpdate(container, UpdateContainerRequest.newInstance(
        container.getVersion(), container.getId(),
        Resources.fitsIn(capability, container.getResource()) ?
            ContainerUpdateType.DECREASE_RESOURCE :
            ContainerUpdateType.INCREASE_RESOURCE,
        capability, null));
  }

  /**
   * Request a container update before calling <code>allocate</code>.
   * Any previous pending update request of the same container will be
   * removed.
   *
   * @param container The container returned from the last successful resource
   *                  allocation or update
   * @param updateContainerRequest The <code>UpdateContainerRequest</code>.
   */
  public abstract void requestContainerUpdate(
      Container container, UpdateContainerRequest updateContainerRequest);

  /**
   * Release containers assigned by the Resource Manager. If the app cannot use
   * the container or wants to give up the container then it can release them.
   * The app needs to make new requests for the released resource capability if
   * it still needs it. eg. it released non-local resources
   * @param containerId
   */
  public abstract void releaseAssignedContainer(ContainerId containerId);

  /**
   * Get the currently available resources in the cluster.
   * A valid value is available after a call to allocate has been made
   * @return Currently available resources
   */
  public abstract Resource getAvailableResources();

  /**
   * Get the current number of nodes in the cluster.
   * A valid values is available after a call to allocate has been made
   * @return Current number of nodes in the cluster
   */
  public abstract int getClusterNodeCount();

  /**
   * Get outstanding <code>ContainerRequest</code>s matching the given
   * parameters. These ContainerRequests should have been added via
   * <code>addContainerRequest</code> earlier in the lifecycle. For performance,
   * the AMRMClient may return its internal collection directly without creating
   * a copy. Users should not perform mutable operations on the return value.
   * Each collection in the list contains requests with identical
   * <code>Resource</code> size that fit in the given capability. In a
   * collection, requests will be returned in the same order as they were added.
   *
   * NOTE: This API only matches Container requests that were created by the
   * client WITHOUT the allocationRequestId being set.
   *
   * @return Collection of request matching the parameters
   */
  @InterfaceStability.Evolving
  public abstract List<? extends Collection<T>> getMatchingRequests(
      Priority priority, String resourceName, Resource capability);

  /**
   * Get outstanding <code>ContainerRequest</code>s matching the given
   * parameters. These ContainerRequests should have been added via
   * <code>addContainerRequest</code> earlier in the lifecycle. For performance,
   * the AMRMClient may return its internal collection directly without creating
   * a copy. Users should not perform mutable operations on the return value.
   * Each collection in the list contains requests with identical
   * <code>Resource</code> size that fit in the given capability. In a
   * collection, requests will be returned in the same order as they were added.
   * specify an <code>ExecutionType</code>.
   *
   * NOTE: This API only matches Container requests that were created by the
   * client WITHOUT the allocationRequestId being set.
   *
   * @param priority Priority
   * @param resourceName Location
   * @param executionType ExecutionType
   * @param capability Capability
   * @return Collection of request matching the parameters
   */
  @InterfaceStability.Evolving
  public List<? extends Collection<T>> getMatchingRequests(
      Priority priority, String resourceName, ExecutionType executionType,
      Resource capability) {
    throw new UnsupportedOperationException("The sub-class extending" +
        " AMRMClient is expected to implement this !!");
  }

  /**
   * Same as the {@link ExecutionType}-aware overload above, additionally
   * filtering by resource profile. Sub-classes are expected to override.
   */
  @InterfaceStability.Evolving
  public List<? extends Collection<T>> getMatchingRequests(
      Priority priority, String resourceName, ExecutionType executionType,
      Resource capability, String profile) {
    throw new UnsupportedOperationException("The sub-class extending" +
        " AMRMClient is expected to implement this !!");
  }

  /**
   * Get outstanding <code>ContainerRequest</code>s matching the given
   * allocationRequestId. These ContainerRequests should have been added via
   * <code>addContainerRequest</code> earlier in the lifecycle. For performance,
   * the AMRMClient may return its internal collection directly without creating
   * a copy. Users should not perform mutable operations on the return value.
   *
   * NOTE: This API only matches Container requests that were created by the
   * client WITH the allocationRequestId being set to a non-default value.
   *
   * @param allocationRequestId Allocation Request Id
   * @return Collection of request matching the parameters
   */
  @InterfaceStability.Evolving
  public abstract Collection<T> getMatchingRequests(long allocationRequestId);

  /**
   * Update application's blacklist with addition or removal resources.
   *
   * @param blacklistAdditions list of resources which should be added to the
   *        application blacklist
   * @param blacklistRemovals list of resources which should be removed from the
   *        application blacklist
   */
  public abstract void updateBlacklist(List<String> blacklistAdditions,
      List<String> blacklistRemovals);

  /**
   * Set the NM token cache for the <code>AMRMClient</code>. This cache must
   * be shared with the {@link NMClient} used to manage containers for the
   * <code>AMRMClient</code>
   * <p>
   * If a NM token cache is not set, the {@link NMTokenCache#getSingleton()}
   * singleton instance will be used.
   *
   * @param nmTokenCache the NM token cache to use.
   */
  public void setNMTokenCache(NMTokenCache nmTokenCache) {
    this.nmTokenCache = nmTokenCache;
  }

  /**
   * Get the NM token cache of the <code>AMRMClient</code>. This cache must be
   * shared with the {@link NMClient} used to manage containers for the
   * <code>AMRMClient</code>.
   * <p>
   * If a NM token cache is not set, the {@link NMTokenCache#getSingleton()}
   * singleton instance will be used.
   *
   * @return the NM token cache.
   */
  public NMTokenCache getNMTokenCache() {
    return nmTokenCache;
  }

  /**
   * Register TimelineV2Client to AMRMClient. Writer's address for the timeline
   * V2 client will be updated dynamically if registered.
   *
   * @param client the timeline v2 client to register
   */
  public void registerTimelineV2Client(TimelineV2Client client) {
    timelineV2Client = client;
  }

  /**
   * Get registered timeline v2 client.
   * @return the registered timeline v2 client
   */
  public TimelineV2Client getRegisteredTimelineV2Client() {
    return this.timelineV2Client;
  }

  /**
   * Update application's tracking url on next heartbeat.
   *
   * @param trackingUrl new tracking url for this application
   */
  @Public
  @InterfaceStability.Unstable
  public void updateTrackingUrl(String trackingUrl) {
    // Unimplemented.
  }

  /**
   * Wait for <code>check</code> to return true for each 1000 ms.
   * See also {@link #waitFor(java.util.function.Supplier, int)}
   * and {@link #waitFor(java.util.function.Supplier, int, int)}
   * @param check the condition for which it should wait
   */
  public void waitFor(Supplier<Boolean> check) throws InterruptedException {
    waitFor(check, 1000);
  }

  /**
   * Wait for <code>check</code> to return true for each
   * <code>checkEveryMillis</code> ms.
   * See also {@link #waitFor(java.util.function.Supplier, int, int)}
   * @param check user defined checker
   * @param checkEveryMillis interval to call <code>check</code>
   */
  public void waitFor(Supplier<Boolean> check, int checkEveryMillis)
      throws InterruptedException {
    waitFor(check, checkEveryMillis, 1);
  }

  /**
   * Wait for <code>check</code> to return true for each
   * <code>checkEveryMillis</code> ms. In the main loop, this method will log
   * the message "waiting in main loop" for each <code>logInterval</code> times
   * iteration to confirm the thread is alive.
   * @param check user defined checker
   * @param checkEveryMillis interval to call <code>check</code>
   * @param logInterval interval to log for each
   */
  public void waitFor(Supplier<Boolean> check, int checkEveryMillis,
      int logInterval) throws InterruptedException {
    Preconditions.checkNotNull(check, "check should not be null");
    // NOTE(review): the checks below accept 0 although the messages say
    // "positive" — confirm whether 0 is intentionally allowed.
    Preconditions.checkArgument(checkEveryMillis >= 0,
        "checkEveryMillis should be positive value");
    Preconditions.checkArgument(logInterval >= 0,
        "logInterval should be positive value");
    int loggingCounter = logInterval;
    // Poll until check succeeds; emit a liveness log line every logInterval
    // iterations.
    do {
      LOG.debug("Check the condition for main loop.");
      boolean result = check.get();
      if (result) {
        LOG.info("Exits the main loop.");
        return;
      }
      if (--loggingCounter <= 0) {
        LOG.info("Waiting in main loop.");
        loggingCounter = logInterval;
      }
      Thread.sleep(checkEveryMillis);
    } while (true);
  }
}
googleapis/google-cloud-java
35,259
java-gke-backup/google-cloud-gke-backup/src/test/java/com/google/cloud/gkebackup/v1/MockBackupForGKEImpl.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.gkebackup.v1;

import com.google.api.core.BetaApi;
import com.google.cloud.gkebackup.v1.BackupForGKEGrpc.BackupForGKEImplBase;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import io.grpc.stub.StreamObserver;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import javax.annotation.Generated;

// Test-only fake of the BackupForGKE gRPC service. Tests enqueue canned
// responses (or exceptions); each RPC method dequeues the next element in
// FIFO order, records the request, and replays the element to the observer.
@BetaApi
@Generated("by gapic-generator-java")
public class MockBackupForGKEImpl extends BackupForGKEImplBase {
  // Requests received so far, in arrival order. Note a request is only
  // recorded when the dequeued element matches the method's response type.
  private List<AbstractMessage> requests;
  // Pending canned elements; each is either an AbstractMessage or an Exception.
  private Queue<Object> responses;

  public MockBackupForGKEImpl() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }

  public List<AbstractMessage> getRequests() {
    return requests;
  }

  public void addResponse(AbstractMessage response) {
    responses.add(response);
  }

  // Replaces the whole pending queue with a copy of the given list.
  public void setResponses(List<AbstractMessage> responses) {
    this.responses = new LinkedList<Object>(responses);
  }

  public void addException(Exception exception) {
    responses.add(exception);
  }

  // Clears both recorded requests and pending responses.
  public void reset() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }

  @Override
  public void createBackupPlan(
      CreateBackupPlanRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      // Queue empty or wrong element type: surface a descriptive error.
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method CreateBackupPlan, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void listBackupPlans(
      ListBackupPlansRequest request, StreamObserver<ListBackupPlansResponse> responseObserver) {
    Object response = responses.poll();
    if (response instanceof ListBackupPlansResponse) {
      requests.add(request);
      responseObserver.onNext(((ListBackupPlansResponse) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method ListBackupPlans, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  ListBackupPlansResponse.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void getBackupPlan(
      GetBackupPlanRequest request, StreamObserver<BackupPlan> responseObserver) {
    Object response = responses.poll();
    if (response instanceof BackupPlan) {
      requests.add(request);
      responseObserver.onNext(((BackupPlan) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method GetBackupPlan, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  BackupPlan.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void updateBackupPlan(
      UpdateBackupPlanRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method UpdateBackupPlan, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void deleteBackupPlan(
      DeleteBackupPlanRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method DeleteBackupPlan, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void createBackupChannel(
      CreateBackupChannelRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method CreateBackupChannel, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void listBackupChannels(
      ListBackupChannelsRequest request,
      StreamObserver<ListBackupChannelsResponse> responseObserver) {
    Object response = responses.poll();
    if (response instanceof ListBackupChannelsResponse) {
      requests.add(request);
      responseObserver.onNext(((ListBackupChannelsResponse) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method ListBackupChannels, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  ListBackupChannelsResponse.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void getBackupChannel(
      GetBackupChannelRequest request, StreamObserver<BackupChannel> responseObserver) {
    Object response = responses.poll();
    if (response instanceof BackupChannel) {
      requests.add(request);
      responseObserver.onNext(((BackupChannel) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method GetBackupChannel, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  BackupChannel.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void updateBackupChannel(
      UpdateBackupChannelRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method UpdateBackupChannel, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void deleteBackupChannel(
      DeleteBackupChannelRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method DeleteBackupChannel, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void listBackupPlanBindings(
      ListBackupPlanBindingsRequest request,
      StreamObserver<ListBackupPlanBindingsResponse> responseObserver) {
    Object response = responses.poll();
    if (response instanceof ListBackupPlanBindingsResponse) {
      requests.add(request);
      responseObserver.onNext(((ListBackupPlanBindingsResponse) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method ListBackupPlanBindings, expected %s or"
                      + " %s",
                  response == null ? "null" : response.getClass().getName(),
                  ListBackupPlanBindingsResponse.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void getBackupPlanBinding(
      GetBackupPlanBindingRequest request, StreamObserver<BackupPlanBinding> responseObserver) {
    Object response = responses.poll();
    if (response instanceof BackupPlanBinding) {
      requests.add(request);
      responseObserver.onNext(((BackupPlanBinding) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method GetBackupPlanBinding, expected %s or"
                      + " %s",
                  response == null ? "null" : response.getClass().getName(),
                  BackupPlanBinding.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void createBackup(
      CreateBackupRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method CreateBackup, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void listBackups(
      ListBackupsRequest request, StreamObserver<ListBackupsResponse> responseObserver) {
    Object response = responses.poll();
    if (response instanceof ListBackupsResponse) {
      requests.add(request);
      responseObserver.onNext(((ListBackupsResponse) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method ListBackups, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  ListBackupsResponse.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void getBackup(GetBackupRequest request, StreamObserver<Backup> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Backup) {
      requests.add(request);
      responseObserver.onNext(((Backup) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method GetBackup, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Backup.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void updateBackup(
      UpdateBackupRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method UpdateBackup, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void deleteBackup(
      DeleteBackupRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method DeleteBackup, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  Operation.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void listVolumeBackups(
      ListVolumeBackupsRequest request,
      StreamObserver<ListVolumeBackupsResponse> responseObserver) {
    Object response = responses.poll();
    if (response instanceof ListVolumeBackupsResponse) {
      requests.add(request);
      responseObserver.onNext(((ListVolumeBackupsResponse) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method ListVolumeBackups, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  ListVolumeBackupsResponse.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void getVolumeBackup(
      GetVolumeBackupRequest request, StreamObserver<VolumeBackup> responseObserver) {
    Object response = responses.poll();
    if (response instanceof VolumeBackup) {
      requests.add(request);
      responseObserver.onNext(((VolumeBackup) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method GetVolumeBackup, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  VolumeBackup.class.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void createRestorePlan(
      CreateRestorePlanRequest request, StreamObserver<Operation> responseObserver) {
    Object response = responses.poll();
    if (response instanceof Operation) {
      requests.add(request);
      responseObserver.onNext(((Operation) response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method CreateRestorePlan, expected %s or %s",
                  response == null ?
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void listRestorePlans( ListRestorePlansRequest request, StreamObserver<ListRestorePlansResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListRestorePlansResponse) { requests.add(request); responseObserver.onNext(((ListRestorePlansResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListRestorePlans, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListRestorePlansResponse.class.getName(), Exception.class.getName()))); } } @Override public void getRestorePlan( GetRestorePlanRequest request, StreamObserver<RestorePlan> responseObserver) { Object response = responses.poll(); if (response instanceof RestorePlan) { requests.add(request); responseObserver.onNext(((RestorePlan) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetRestorePlan, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), RestorePlan.class.getName(), Exception.class.getName()))); } } @Override public void updateRestorePlan( UpdateRestorePlanRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateRestorePlan, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteRestorePlan( DeleteRestorePlanRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteRestorePlan, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void createRestoreChannel( CreateRestoreChannelRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateRestoreChannel, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void listRestoreChannels( ListRestoreChannelsRequest request, StreamObserver<ListRestoreChannelsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListRestoreChannelsResponse) { requests.add(request); responseObserver.onNext(((ListRestoreChannelsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListRestoreChannels, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), ListRestoreChannelsResponse.class.getName(), Exception.class.getName()))); } } @Override public void getRestoreChannel( GetRestoreChannelRequest request, StreamObserver<RestoreChannel> responseObserver) { Object response = responses.poll(); if (response instanceof RestoreChannel) { requests.add(request); responseObserver.onNext(((RestoreChannel) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetRestoreChannel, expected %s or %s", response == null ? "null" : response.getClass().getName(), RestoreChannel.class.getName(), Exception.class.getName()))); } } @Override public void updateRestoreChannel( UpdateRestoreChannelRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateRestoreChannel, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteRestoreChannel( DeleteRestoreChannelRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteRestoreChannel, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void listRestorePlanBindings( ListRestorePlanBindingsRequest request, StreamObserver<ListRestorePlanBindingsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListRestorePlanBindingsResponse) { requests.add(request); responseObserver.onNext(((ListRestorePlanBindingsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListRestorePlanBindings, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), ListRestorePlanBindingsResponse.class.getName(), Exception.class.getName()))); } } @Override public void getRestorePlanBinding( GetRestorePlanBindingRequest request, StreamObserver<RestorePlanBinding> responseObserver) { Object response = responses.poll(); if (response instanceof RestorePlanBinding) { requests.add(request); responseObserver.onNext(((RestorePlanBinding) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetRestorePlanBinding, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), RestorePlanBinding.class.getName(), Exception.class.getName()))); } } @Override public void createRestore( CreateRestoreRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateRestore, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void listRestores( ListRestoresRequest request, StreamObserver<ListRestoresResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListRestoresResponse) { requests.add(request); responseObserver.onNext(((ListRestoresResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListRestores, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListRestoresResponse.class.getName(), Exception.class.getName()))); } } @Override public void getRestore(GetRestoreRequest request, StreamObserver<Restore> responseObserver) { Object response = responses.poll(); if (response instanceof Restore) { requests.add(request); responseObserver.onNext(((Restore) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetRestore, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Restore.class.getName(), Exception.class.getName()))); } } @Override public void updateRestore( UpdateRestoreRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateRestore, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteRestore( DeleteRestoreRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteRestore, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void listVolumeRestores( ListVolumeRestoresRequest request, StreamObserver<ListVolumeRestoresResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListVolumeRestoresResponse) { requests.add(request); responseObserver.onNext(((ListVolumeRestoresResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListVolumeRestores, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListVolumeRestoresResponse.class.getName(), Exception.class.getName()))); } } @Override public void getVolumeRestore( GetVolumeRestoreRequest request, StreamObserver<VolumeRestore> responseObserver) { Object response = responses.poll(); if (response instanceof VolumeRestore) { requests.add(request); responseObserver.onNext(((VolumeRestore) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetVolumeRestore, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), VolumeRestore.class.getName(), Exception.class.getName()))); } } @Override public void getBackupIndexDownloadUrl( GetBackupIndexDownloadUrlRequest request, StreamObserver<GetBackupIndexDownloadUrlResponse> responseObserver) { Object response = responses.poll(); if (response instanceof GetBackupIndexDownloadUrlResponse) { requests.add(request); responseObserver.onNext(((GetBackupIndexDownloadUrlResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetBackupIndexDownloadUrl, expected %s" + " or %s", response == null ? "null" : response.getClass().getName(), GetBackupIndexDownloadUrlResponse.class.getName(), Exception.class.getName()))); } } }
googleapis/google-cloud-java
35,298
java-admanager/proto-ad-manager-v1/src/main/java/com/google/ads/admanager/v1/CustomTargetingClause.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/admanager/v1/targeting.proto // Protobuf Java Version: 3.25.8 package com.google.ads.admanager.v1; /** * * * <pre> * Represents a logical AND of individual custom targeting expressions. * </pre> * * Protobuf type {@code google.ads.admanager.v1.CustomTargetingClause} */ public final class CustomTargetingClause extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.admanager.v1.CustomTargetingClause) CustomTargetingClauseOrBuilder { private static final long serialVersionUID = 0L; // Use CustomTargetingClause.newBuilder() to construct. 
private CustomTargetingClause(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomTargetingClause() { customTargetingLiterals_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CustomTargetingClause(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.admanager.v1.TargetingProto .internal_static_google_ads_admanager_v1_CustomTargetingClause_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.admanager.v1.TargetingProto .internal_static_google_ads_admanager_v1_CustomTargetingClause_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.admanager.v1.CustomTargetingClause.class, com.google.ads.admanager.v1.CustomTargetingClause.Builder.class); } public static final int CUSTOM_TARGETING_LITERALS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.admanager.v1.CustomTargetingLiteral> customTargetingLiterals_; /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public java.util.List<com.google.ads.admanager.v1.CustomTargetingLiteral> getCustomTargetingLiteralsList() { return customTargetingLiterals_; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public java.util.List<? 
extends com.google.ads.admanager.v1.CustomTargetingLiteralOrBuilder> getCustomTargetingLiteralsOrBuilderList() { return customTargetingLiterals_; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public int getCustomTargetingLiteralsCount() { return customTargetingLiterals_.size(); } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.ads.admanager.v1.CustomTargetingLiteral getCustomTargetingLiterals(int index) { return customTargetingLiterals_.get(index); } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.ads.admanager.v1.CustomTargetingLiteralOrBuilder getCustomTargetingLiteralsOrBuilder(int index) { return customTargetingLiterals_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < customTargetingLiterals_.size(); i++) { output.writeMessage(1, customTargetingLiterals_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i 
< customTargetingLiterals_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, customTargetingLiterals_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.admanager.v1.CustomTargetingClause)) { return super.equals(obj); } com.google.ads.admanager.v1.CustomTargetingClause other = (com.google.ads.admanager.v1.CustomTargetingClause) obj; if (!getCustomTargetingLiteralsList().equals(other.getCustomTargetingLiteralsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCustomTargetingLiteralsCount() > 0) { hash = (37 * hash) + CUSTOM_TARGETING_LITERALS_FIELD_NUMBER; hash = (53 * hash) + getCustomTargetingLiteralsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.ads.admanager.v1.CustomTargetingClause parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.admanager.v1.CustomTargetingClause parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.ads.admanager.v1.CustomTargetingClause parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.admanager.v1.CustomTargetingClause prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents a logical AND of individual custom targeting expressions. * </pre> * * Protobuf type {@code google.ads.admanager.v1.CustomTargetingClause} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.admanager.v1.CustomTargetingClause) com.google.ads.admanager.v1.CustomTargetingClauseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.admanager.v1.TargetingProto .internal_static_google_ads_admanager_v1_CustomTargetingClause_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.admanager.v1.TargetingProto .internal_static_google_ads_admanager_v1_CustomTargetingClause_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.admanager.v1.CustomTargetingClause.class, com.google.ads.admanager.v1.CustomTargetingClause.Builder.class); } // Construct using com.google.ads.admanager.v1.CustomTargetingClause.newBuilder() private Builder() {} private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (customTargetingLiteralsBuilder_ == null) { customTargetingLiterals_ = java.util.Collections.emptyList(); } else { customTargetingLiterals_ = null; customTargetingLiteralsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.admanager.v1.TargetingProto .internal_static_google_ads_admanager_v1_CustomTargetingClause_descriptor; } @java.lang.Override public com.google.ads.admanager.v1.CustomTargetingClause getDefaultInstanceForType() { return com.google.ads.admanager.v1.CustomTargetingClause.getDefaultInstance(); } @java.lang.Override public com.google.ads.admanager.v1.CustomTargetingClause build() { com.google.ads.admanager.v1.CustomTargetingClause result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.admanager.v1.CustomTargetingClause buildPartial() { com.google.ads.admanager.v1.CustomTargetingClause result = new com.google.ads.admanager.v1.CustomTargetingClause(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.ads.admanager.v1.CustomTargetingClause result) { if (customTargetingLiteralsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { customTargetingLiterals_ = java.util.Collections.unmodifiableList(customTargetingLiterals_); bitField0_ = (bitField0_ & ~0x00000001); } result.customTargetingLiterals_ = customTargetingLiterals_; } else { result.customTargetingLiterals_ = customTargetingLiteralsBuilder_.build(); } } private void buildPartial0(com.google.ads.admanager.v1.CustomTargetingClause result) { int from_bitField0_ = 
bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.admanager.v1.CustomTargetingClause) { return mergeFrom((com.google.ads.admanager.v1.CustomTargetingClause) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.admanager.v1.CustomTargetingClause other) { if (other == com.google.ads.admanager.v1.CustomTargetingClause.getDefaultInstance()) return this; if (customTargetingLiteralsBuilder_ == null) { if (!other.customTargetingLiterals_.isEmpty()) { if (customTargetingLiterals_.isEmpty()) { customTargetingLiterals_ = other.customTargetingLiterals_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.addAll(other.customTargetingLiterals_); } onChanged(); } } else { if (!other.customTargetingLiterals_.isEmpty()) { if (customTargetingLiteralsBuilder_.isEmpty()) { customTargetingLiteralsBuilder_.dispose(); customTargetingLiteralsBuilder_ = null; customTargetingLiterals_ = other.customTargetingLiterals_; bitField0_ = 
(bitField0_ & ~0x00000001); customTargetingLiteralsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getCustomTargetingLiteralsFieldBuilder() : null; } else { customTargetingLiteralsBuilder_.addAllMessages(other.customTargetingLiterals_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.admanager.v1.CustomTargetingLiteral m = input.readMessage( com.google.ads.admanager.v1.CustomTargetingLiteral.parser(), extensionRegistry); if (customTargetingLiteralsBuilder_ == null) { ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.add(m); } else { customTargetingLiteralsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.ads.admanager.v1.CustomTargetingLiteral> customTargetingLiterals_ = java.util.Collections.emptyList(); private void ensureCustomTargetingLiteralsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { customTargetingLiterals_ = new java.util.ArrayList<com.google.ads.admanager.v1.CustomTargetingLiteral>( customTargetingLiterals_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< 
com.google.ads.admanager.v1.CustomTargetingLiteral, com.google.ads.admanager.v1.CustomTargetingLiteral.Builder, com.google.ads.admanager.v1.CustomTargetingLiteralOrBuilder> customTargetingLiteralsBuilder_; /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public java.util.List<com.google.ads.admanager.v1.CustomTargetingLiteral> getCustomTargetingLiteralsList() { if (customTargetingLiteralsBuilder_ == null) { return java.util.Collections.unmodifiableList(customTargetingLiterals_); } else { return customTargetingLiteralsBuilder_.getMessageList(); } } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public int getCustomTargetingLiteralsCount() { if (customTargetingLiteralsBuilder_ == null) { return customTargetingLiterals_.size(); } else { return customTargetingLiteralsBuilder_.getCount(); } } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.ads.admanager.v1.CustomTargetingLiteral getCustomTargetingLiterals( int index) { if (customTargetingLiteralsBuilder_ == null) { return customTargetingLiterals_.get(index); } else { return customTargetingLiteralsBuilder_.getMessage(index); } } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setCustomTargetingLiterals( int index, com.google.ads.admanager.v1.CustomTargetingLiteral value) { if (customTargetingLiteralsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.set(index, value); onChanged(); } else { customTargetingLiteralsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setCustomTargetingLiterals( int index, com.google.ads.admanager.v1.CustomTargetingLiteral.Builder builderForValue) { if (customTargetingLiteralsBuilder_ == null) { ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.set(index, builderForValue.build()); onChanged(); } else { customTargetingLiteralsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder addCustomTargetingLiterals( com.google.ads.admanager.v1.CustomTargetingLiteral value) { if (customTargetingLiteralsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.add(value); onChanged(); } else { customTargetingLiteralsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder addCustomTargetingLiterals( int index, com.google.ads.admanager.v1.CustomTargetingLiteral value) { if (customTargetingLiteralsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.add(index, value); onChanged(); } else { customTargetingLiteralsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder addCustomTargetingLiterals( com.google.ads.admanager.v1.CustomTargetingLiteral.Builder builderForValue) { if (customTargetingLiteralsBuilder_ == null) { ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.add(builderForValue.build()); onChanged(); } else { customTargetingLiteralsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder addCustomTargetingLiterals( int index, com.google.ads.admanager.v1.CustomTargetingLiteral.Builder builderForValue) { if (customTargetingLiteralsBuilder_ == null) { ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.add(index, builderForValue.build()); onChanged(); } else { customTargetingLiteralsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder addAllCustomTargetingLiterals( java.lang.Iterable<? extends com.google.ads.admanager.v1.CustomTargetingLiteral> values) { if (customTargetingLiteralsBuilder_ == null) { ensureCustomTargetingLiteralsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, customTargetingLiterals_); onChanged(); } else { customTargetingLiteralsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearCustomTargetingLiterals() { if (customTargetingLiteralsBuilder_ == null) { customTargetingLiterals_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { customTargetingLiteralsBuilder_.clear(); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder removeCustomTargetingLiterals(int index) { if (customTargetingLiteralsBuilder_ == null) { ensureCustomTargetingLiteralsIsMutable(); customTargetingLiterals_.remove(index); onChanged(); } else { customTargetingLiteralsBuilder_.remove(index); } return this; } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.ads.admanager.v1.CustomTargetingLiteral.Builder getCustomTargetingLiteralsBuilder(int index) { return getCustomTargetingLiteralsFieldBuilder().getBuilder(index); } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.ads.admanager.v1.CustomTargetingLiteralOrBuilder getCustomTargetingLiteralsOrBuilder(int index) { if (customTargetingLiteralsBuilder_ == null) { return customTargetingLiterals_.get(index); } else { return customTargetingLiteralsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public java.util.List<? extends com.google.ads.admanager.v1.CustomTargetingLiteralOrBuilder> getCustomTargetingLiteralsOrBuilderList() { if (customTargetingLiteralsBuilder_ != null) { return customTargetingLiteralsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(customTargetingLiterals_); } } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.ads.admanager.v1.CustomTargetingLiteral.Builder addCustomTargetingLiteralsBuilder() { return getCustomTargetingLiteralsFieldBuilder() .addBuilder(com.google.ads.admanager.v1.CustomTargetingLiteral.getDefaultInstance()); } /** * * * <pre> * Optional. 
Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.ads.admanager.v1.CustomTargetingLiteral.Builder addCustomTargetingLiteralsBuilder(int index) { return getCustomTargetingLiteralsFieldBuilder() .addBuilder( index, com.google.ads.admanager.v1.CustomTargetingLiteral.getDefaultInstance()); } /** * * * <pre> * Optional. Leaf targeting expressions for custom key/values. * </pre> * * <code> * repeated .google.ads.admanager.v1.CustomTargetingLiteral custom_targeting_literals = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public java.util.List<com.google.ads.admanager.v1.CustomTargetingLiteral.Builder> getCustomTargetingLiteralsBuilderList() { return getCustomTargetingLiteralsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.admanager.v1.CustomTargetingLiteral, com.google.ads.admanager.v1.CustomTargetingLiteral.Builder, com.google.ads.admanager.v1.CustomTargetingLiteralOrBuilder> getCustomTargetingLiteralsFieldBuilder() { if (customTargetingLiteralsBuilder_ == null) { customTargetingLiteralsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.admanager.v1.CustomTargetingLiteral, com.google.ads.admanager.v1.CustomTargetingLiteral.Builder, com.google.ads.admanager.v1.CustomTargetingLiteralOrBuilder>( customTargetingLiterals_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); customTargetingLiterals_ = null; } return customTargetingLiteralsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.ads.admanager.v1.CustomTargetingClause) } // @@protoc_insertion_point(class_scope:google.ads.admanager.v1.CustomTargetingClause) private static final com.google.ads.admanager.v1.CustomTargetingClause DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.admanager.v1.CustomTargetingClause(); } public static com.google.ads.admanager.v1.CustomTargetingClause getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomTargetingClause> PARSER = new com.google.protobuf.AbstractParser<CustomTargetingClause>() { @java.lang.Override public CustomTargetingClause parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CustomTargetingClause> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CustomTargetingClause> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.admanager.v1.CustomTargetingClause getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/kafka
35,501
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/streams/CurrentAssignmentBuilderTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.coordinator.group.streams; import org.apache.kafka.common.Uuid; import org.apache.kafka.common.errors.FencedMemberEpochException; import org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.TaskRole; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import java.util.Set; import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasks; import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasksTuple; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; public class CurrentAssignmentBuilderTest { private static final String SUBTOPOLOGY_ID1 = Uuid.randomUuid().toString(); private static final String SUBTOPOLOGY_ID2 = Uuid.randomUuid().toString(); private static final String PROCESS_ID = "process_id"; private static final String MEMBER_NAME = "member"; @ParameterizedTest @EnumSource(TaskRole.class) public void testStableToStable(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) 
.setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks( mkTasksTuple( taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch + 1) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple( taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testStableToStableAtTargetEpoch(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks( mkTasksTuple( taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> 
Set.of()) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple( taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testStableToStableWithNewTasks(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2, 4), mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch + 1) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2, 4), mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testStableToUnrevokedTasks(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) 
.setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 4, 5))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNREVOKED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2), mkTasks(SUBTOPOLOGY_ID2, 4))) .setTasksPendingRevocation(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1), mkTasks(SUBTOPOLOGY_ID2, 3))) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testStableToUnrevokedWithEmptyAssignment(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks( mkTasksTuple( taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, TasksTuple.EMPTY) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) 
.setState(MemberState.UNREVOKED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(TasksTuple.EMPTY) .setTasksPendingRevocation( mkTasksTuple( taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testStableToUnreleasedTasks(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2, 4), mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNRELEASED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch + 1) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testStableToUnreleasedTasksWithOwnedTasksNotHavingRevokedTasks(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) 
.setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 4))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3, 5))) .withCurrentActiveTaskProcessId((subtopologyId, __) -> SUBTOPOLOGY_ID2.equals(subtopologyId) ? PROCESS_ID : null ) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .withOwnedAssignment(mkTasksTuple(taskRole)) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNRELEASED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch + 1) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2), mkTasks(SUBTOPOLOGY_ID2, 3))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testUnrevokedTasksToStable(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNREVOKED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .setTasksPendingRevocation(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1), mkTasks(SUBTOPOLOGY_ID2, 4))) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) 
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .withOwnedAssignment(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch + 1) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testRemainsInUnrevokedTasks(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNREVOKED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .setTasksPendingRevocation(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1), mkTasks(SUBTOPOLOGY_ID2, 4))) .build(); CurrentAssignmentBuilder currentAssignmentBuilder = new CurrentAssignmentBuilder( member) .withTargetAssignment(memberEpoch + 2, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 3), mkTasks(SUBTOPOLOGY_ID2, 6))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds( (subtopologyId, partitionId) -> Set.of()); assertEquals( member, currentAssignmentBuilder .withOwnedAssignment(null) .build() ); assertEquals( member, currentAssignmentBuilder .withOwnedAssignment(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .build() ); assertEquals( member, currentAssignmentBuilder .withOwnedAssignment(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 4, 5, 6))) .build() ); } 
@ParameterizedTest @EnumSource(TaskRole.class) public void testUnrevokedTasksToUnrevokedTasks(TaskRole taskRole) { final int memberEpoch = 10; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNREVOKED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .setTasksPendingRevocation(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1), mkTasks(SUBTOPOLOGY_ID2, 4))) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 2, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 3), mkTasks(SUBTOPOLOGY_ID2, 6))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null) .withOwnedAssignment(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNREVOKED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch + 1) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 3), mkTasks(SUBTOPOLOGY_ID2, 6))) .setTasksPendingRevocation(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2), mkTasks(SUBTOPOLOGY_ID2, 5))) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testUnrevokedTasksToUnreleasedTasks(TaskRole taskRole) { final int memberEpoch = 11; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNREVOKED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch - 1) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .setTasksPendingRevocation(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 1), mkTasks(SUBTOPOLOGY_ID2, 4))) .build(); StreamsGroupMember updatedMember = new 
CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4), mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of()) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of()) .withOwnedAssignment(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6)) ) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNRELEASED_TASKS) .setProcessId(PROCESS_ID) .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(), updatedMember ); } @ParameterizedTest @EnumSource(TaskRole.class) public void testUnreleasedTasksToStable(TaskRole taskRole) { final int memberEpoch = 11; StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.UNRELEASED_TASKS) .setProcessId("process1") .setMemberEpoch(memberEpoch) .setPreviousMemberEpoch(memberEpoch) .setAssignedTasks(mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .setTasksPendingRevocation(TasksTuple.EMPTY) .build(); StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member) .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole, mkTasks(SUBTOPOLOGY_ID1, 2, 3), mkTasks(SUBTOPOLOGY_ID2, 5, 6))) .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID) .withCurrentStandbyTaskProcessIds( (subtopologyId, partitionId) -> Set.of(PROCESS_ID)) .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of(PROCESS_ID)) .build(); assertEquals( new StreamsGroupMember.Builder(MEMBER_NAME) .setState(MemberState.STABLE) .setProcessId("process1") .setMemberEpoch(memberEpoch + 1) 
.setPreviousMemberEpoch(memberEpoch)
                .setAssignedTasks(mkTasksTuple(taskRole,
                    mkTasks(SUBTOPOLOGY_ID1, 2, 3),
                    mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
                .setTasksPendingRevocation(TasksTuple.EMPTY)
                .build(),
            updatedMember
        );
    }

    // UNRELEASED_TASKS -> STABLE: the new target tasks are not owned by anyone
    // (all current-owner lookups return null/empty), so the member can take the
    // full target assignment immediately and become STABLE.
    @ParameterizedTest
    @EnumSource(TaskRole.class)
    public void testUnreleasedTasksToStableWithNewTasks(TaskRole taskRole) {
        int memberEpoch = 11;
        StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
            .setState(MemberState.UNRELEASED_TASKS)
            .setProcessId("process1")
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setAssignedTasks(mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
            .setTasksPendingRevocation(TasksTuple.EMPTY)
            .build();

        StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
            .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
            .withCurrentStandbyTaskProcessIds(
                (subtopologyId, partitionId) -> Set.of())
            .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
            .build();

        assertEquals(
            new StreamsGroupMember.Builder(MEMBER_NAME)
                .setState(MemberState.STABLE)
                .setProcessId("process1")
                .setMemberEpoch(memberEpoch)
                .setPreviousMemberEpoch(memberEpoch)
                .setAssignedTasks(mkTasksTuple(taskRole,
                    mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
                    mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
                .setTasksPendingRevocation(TasksTuple.EMPTY)
                .build(),
            updatedMember
        );
    }

    // UNRELEASED_TASKS -> UNRELEASED_TASKS: the new target tasks are still owned
    // elsewhere in every role on this member's own process, so nothing changes and
    // the member object is returned unmodified (asserted via assertEquals(member, ...)).
    @ParameterizedTest
    @EnumSource(TaskRole.class)
    public void testUnreleasedTasksToUnreleasedTasks(TaskRole taskRole) {
        int memberEpoch = 11;
        StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
            .setState(MemberState.UNRELEASED_TASKS)
            .setProcessId(PROCESS_ID)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setAssignedTasks(mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
            .setTasksPendingRevocation(TasksTuple.EMPTY)
            .build();

        StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
            .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
            .withCurrentStandbyTaskProcessIds(
                (subtopologyId, partitionId) -> Set.of(PROCESS_ID))
            .withCurrentWarmupTaskProcessIds(
                (subtopologyId, partitionId) -> Set.of(PROCESS_ID))
            .build();

        assertEquals(member, updatedMember);
    }

    @ParameterizedTest
    @EnumSource(TaskRole.class)
    public void testUnreleasedTasksToUnreleasedTasksOtherUnreleasedTaskRole(TaskRole taskRole) {
        int memberEpoch = 11;

        // The unreleased task is owned by a task of a different role on the same process.
        StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
            .setState(MemberState.UNRELEASED_TASKS)
            .setProcessId(PROCESS_ID)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setAssignedTasks(mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
            .setTasksPendingRevocation(TasksTuple.EMPTY)
            .build();

        StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withTargetAssignment(memberEpoch, mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
            .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
            // The "other" role (standby vs. warmup, depending on the parameter) reports
            // PROCESS_ID as the current owner, so the tasks stay unreleased.
            .withCurrentStandbyTaskProcessIds(
                (subtopologyId, partitionId) ->
                    (taskRole == TaskRole.STANDBY) ? Set.of() : Set.of(PROCESS_ID))
            .withCurrentWarmupTaskProcessIds(
                (subtopologyId, partitionId) ->
                    (taskRole == TaskRole.STANDBY) ? Set.of(PROCESS_ID) : Set.of())
            .build();

        assertEquals(member, updatedMember);
    }

    @Test
    public void testUnreleasedTasksToUnreleasedTasksAnyActiveOwner() {
        int memberEpoch = 11;

        // The unreleased task remains unreleased, because it is owned by any other instance in
        // an active role, no matter the process.
        // The task that is not unreleased can be assigned.
        StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
            .setState(MemberState.UNRELEASED_TASKS)
            .setProcessId(PROCESS_ID)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setAssignedTasks(mkTasksTuple(TaskRole.ACTIVE,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
            .build();

        // Expect task 7 of subtopology 2 to be newly assigned, while task 4 of
        // subtopology 1 (actively owned by "anyOtherProcess") stays held back.
        StreamsGroupMember expectedMember = new StreamsGroupMember.Builder(MEMBER_NAME)
            .setState(MemberState.UNRELEASED_TASKS)
            .setProcessId(PROCESS_ID)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setAssignedTasks(mkTasksTuple(TaskRole.ACTIVE,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
            .setTasksPendingRevocation(TasksTuple.EMPTY)
            .build();

        StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withTargetAssignment(memberEpoch, mkTasksTuple(TaskRole.ACTIVE,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3, 4),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
            .withCurrentActiveTaskProcessId((subtopologyId, partitionId) ->
                (subtopologyId.equals(SUBTOPOLOGY_ID1) && partitionId == 4) ?
                    "anyOtherProcess" : null)
            .withCurrentStandbyTaskProcessIds(
                (subtopologyId, partitionId) -> Set.of())
            .withCurrentWarmupTaskProcessIds(
                (subtopologyId, partitionId) -> Set.of())
            .build();

        assertEquals(expectedMember, updatedMember);
    }

    // UNRELEASED_TASKS -> UNREVOKED_TASKS: the target epoch advances and shrinks the
    // assignment, so tasks 2/5 move into tasksPendingRevocation while the member
    // stays at its current epoch until the client confirms the revocation.
    @ParameterizedTest
    @EnumSource(TaskRole.class)
    public void testUnreleasedTasksToUnrevokedTasks(TaskRole taskRole) {
        int memberEpoch = 11;
        StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
            .setState(MemberState.UNRELEASED_TASKS)
            .setProcessId("process1")
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setAssignedTasks(mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2, 3),
                mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
            .setTasksPendingRevocation(mkTasksTuple(TaskRole.ACTIVE,
                mkTasks(SUBTOPOLOGY_ID1, 4),
                mkTasks(SUBTOPOLOGY_ID2, 7)))
            .build();

        StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 3),
                mkTasks(SUBTOPOLOGY_ID2, 6)))
            .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
            .withCurrentStandbyTaskProcessIds(
                (subtopologyId, partitionId) -> Set.of())
            .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
            .build();

        assertEquals(
            new StreamsGroupMember.Builder(MEMBER_NAME)
                .setState(MemberState.UNREVOKED_TASKS)
                .setProcessId("process1")
                .setMemberEpoch(memberEpoch)
                .setPreviousMemberEpoch(memberEpoch)
                .setAssignedTasks(mkTasksTuple(taskRole,
                    mkTasks(SUBTOPOLOGY_ID1, 3),
                    mkTasks(SUBTOPOLOGY_ID2, 6)))
                .setTasksPendingRevocation(mkTasksTuple(taskRole,
                    mkTasks(SUBTOPOLOGY_ID1, 2),
                    mkTasks(SUBTOPOLOGY_ID2, 5)))
                .build(),
            updatedMember
        );
    }

    @ParameterizedTest
    @EnumSource(TaskRole.class)
    public void testUnknownState(TaskRole taskRole) {
        int memberEpoch = 11;
        StreamsGroupMember member = new StreamsGroupMember.Builder(MEMBER_NAME)
            .setState(MemberState.UNKNOWN)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setProcessId(PROCESS_ID)
            .setAssignedTasks(mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 3),
                mkTasks(SUBTOPOLOGY_ID2, 6)))
            .setTasksPendingRevocation(mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 2),
                mkTasks(SUBTOPOLOGY_ID2, 5)))
            .build();

        // When the member is in an unknown state, the member is first fenced to force
        // a reset of the client-side member state.
        assertThrows(FencedMemberEpochException.class, () -> new CurrentAssignmentBuilder(member)
            .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 3),
                mkTasks(SUBTOPOLOGY_ID2, 6)))
            .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
            .build());

        // Then the member rejoins with no owned tasks.
        StreamsGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withTargetAssignment(memberEpoch + 1, mkTasksTuple(taskRole,
                mkTasks(SUBTOPOLOGY_ID1, 3),
                mkTasks(SUBTOPOLOGY_ID2, 6)))
            .withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
            .withCurrentStandbyTaskProcessIds(
                (subtopologyId, partitionId) -> Set.of())
            .withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
            .withOwnedAssignment(mkTasksTuple(taskRole))
            .build();

        // After rejoining, the member advances to the new epoch and becomes STABLE.
        assertEquals(
            new StreamsGroupMember.Builder(MEMBER_NAME)
                .setState(MemberState.STABLE)
                .setProcessId(PROCESS_ID)
                .setMemberEpoch(memberEpoch + 1)
                .setPreviousMemberEpoch(memberEpoch)
                .setAssignedTasks(mkTasksTuple(taskRole,
                    mkTasks(SUBTOPOLOGY_ID1, 3),
                    mkTasks(SUBTOPOLOGY_ID2, 6)))
                .setTasksPendingRevocation(TasksTuple.EMPTY)
                .build(),
            updatedMember
        );
    }
}
google/guava
35,429
android/guava/src/com/google/common/io/MoreFiles.java
/*
 * Copyright (C) 2013 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.io;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
import static java.util.Objects.requireNonNull;

import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.graph.Traverser;
import com.google.j2objc.annotations.J2ObjCIncompatible;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.channels.SeekableByteChannel;
import java.nio.charset.Charset;
import java.nio.file.DirectoryIteratorException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystemException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.NoSuchFileException;
import java.nio.file.NotDirectoryException;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.SecureDirectoryStream;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributeView;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Stream;
import org.jspecify.annotations.Nullable;

/**
 * Static utilities for use with {@link Path} instances, intended to complement {@link Files}.
 *
 * <p>Many methods provided by Guava's {@code Files} class for {@link java.io.File} instances are
 * now available via the JDK's {@link java.nio.file.Files} class for {@code Path} - check the JDK's
 * class if a sibling method from {@code Files} appears to be missing from this class.
 *
 * @since 33.4.0 (but since 21.0 in the JRE flavor)
 * @author Colin Decker
 */
@J2ktIncompatible
@GwtIncompatible
@J2ObjCIncompatible // java.nio.file
@IgnoreJRERequirement // Users will use this only if they're already using Path.
public final class MoreFiles {

  // Static utility class; not instantiable.
  private MoreFiles() {}

  /**
   * Returns a view of the given {@code path} as a {@link ByteSource}.
   *
   * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
   * and may affect the behavior of the returned source and the streams it provides. See {@link
   * StandardOpenOption} for the standard options that may be provided. Providing no options is
   * equivalent to providing the {@link StandardOpenOption#READ READ} option.
   */
  public static ByteSource asByteSource(Path path, OpenOption... options) {
    return new PathByteSource(path, options);
  }

  @IgnoreJRERequirement // *should* be redundant with the one on MoreFiles itself
  private static final class PathByteSource extends ByteSource {

    // Empty options array => default behavior, which follows symlinks.
    private static final LinkOption[] FOLLOW_LINKS = {};

    private final Path path;
    private final OpenOption[] options;
    // Whether attribute reads should follow symlinks; derived from the options at
    // construction time (false iff NOFOLLOW_LINKS was passed).
    private final boolean followLinks;

    private PathByteSource(Path path, OpenOption... options) {
      this.path = checkNotNull(path);
      this.options = options.clone(); // defensive copy; caller may mutate its array
      this.followLinks = followLinks(this.options);
      // TODO(cgdecker): validate the provided options... for example, just WRITE seems wrong
    }

    /** Returns false iff the given options contain {@code NOFOLLOW_LINKS}. */
    private static boolean followLinks(OpenOption[] options) {
      for (OpenOption option : options) {
        if (option == NOFOLLOW_LINKS) {
          return false;
        }
      }
      return true;
    }

    @Override
    public InputStream openStream() throws IOException {
      return Files.newInputStream(path, options);
    }

    // Reads the file's basic attributes, honoring the follow-links setting chosen
    // at construction.
    private BasicFileAttributes readAttributes() throws IOException {
      return Files.readAttributes(
          path,
          BasicFileAttributes.class,
          followLinks ? FOLLOW_LINKS : new LinkOption[] {NOFOLLOW_LINKS});
    }

    @Override
    public Optional<Long> sizeIfKnown() {
      BasicFileAttributes attrs;
      try {
        attrs = readAttributes();
      } catch (IOException e) {
        // Failed to get attributes; we don't know the size.
        return Optional.absent();
      }

      // Don't return a size for directories or symbolic links; their sizes are implementation
      // specific and they can't be read as bytes using the read methods anyway.
      if (attrs.isDirectory() || attrs.isSymbolicLink()) {
        return Optional.absent();
      }

      return Optional.of(attrs.size());
    }

    @Override
    public long size() throws IOException {
      BasicFileAttributes attrs = readAttributes();

      // Don't return a size for directories or symbolic links; their sizes are implementation
      // specific and they can't be read as bytes using the read methods anyway.
      if (attrs.isDirectory()) {
        throw new IOException("can't read: is a directory");
      } else if (attrs.isSymbolicLink()) {
        throw new IOException("can't read: is a symbolic link");
      }

      return attrs.size();
    }

    @Override
    public byte[] read() throws IOException {
      // Use the channel's reported size as a sizing hint for the destination buffer.
      try (SeekableByteChannel channel = Files.newByteChannel(path, options)) {
        return ByteStreams.toByteArray(Channels.newInputStream(channel), channel.size());
      }
    }

    @Override
    public CharSource asCharSource(Charset charset) {
      if (options.length == 0) {
        // If no OpenOptions were passed, delegate to Files.lines, which could have performance
        // advantages. (If OpenOptions were passed we can't, because Files.lines doesn't have an
        // overload taking OpenOptions, meaning we can't guarantee the same behavior w.r.t. things
        // like following/not following symlinks.)
        return new AsCharSource(charset) {
          @SuppressWarnings("FilesLinesLeak") // the user needs to close it in this case
          @Override
          public Stream<String> lines() throws IOException {
            return Files.lines(path, charset);
          }
        };
      }

      return super.asCharSource(charset);
    }

    @Override
    public String toString() {
      return "MoreFiles.asByteSource(" + path + ", " + Arrays.toString(options) + ")";
    }
  }

  /**
   * Returns a view of the given {@code path} as a {@link ByteSink}.
   *
   * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
   * and may affect the behavior of the returned sink and the streams it provides. See {@link
   * StandardOpenOption} for the standard options that may be provided. Providing no options is
   * equivalent to providing the {@link StandardOpenOption#CREATE CREATE}, {@link
   * StandardOpenOption#TRUNCATE_EXISTING TRUNCATE_EXISTING} and {@link StandardOpenOption#WRITE
   * WRITE} options.
   */
  public static ByteSink asByteSink(Path path, OpenOption... options) {
    return new PathByteSink(path, options);
  }

  @IgnoreJRERequirement // *should* be redundant with the one on MoreFiles itself
  private static final class PathByteSink extends ByteSink {

    private final Path path;
    private final OpenOption[] options;

    private PathByteSink(Path path, OpenOption... options) {
      this.path = checkNotNull(path);
      this.options = options.clone(); // defensive copy; caller may mutate its array
      // TODO(cgdecker): validate the provided options... for example, just READ seems wrong
    }

    @Override
    public OutputStream openStream() throws IOException {
      return Files.newOutputStream(path, options);
    }

    @Override
    public String toString() {
      return "MoreFiles.asByteSink(" + path + ", " + Arrays.toString(options) + ")";
    }
  }

  /**
   * Returns a view of the given {@code path} as a {@link CharSource} using the given {@code
   * charset}.
*
   * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
   * and may affect the behavior of the returned source and the streams it provides. See {@link
   * StandardOpenOption} for the standard options that may be provided. Providing no options is
   * equivalent to providing the {@link StandardOpenOption#READ READ} option.
   */
  public static CharSource asCharSource(Path path, Charset charset, OpenOption... options) {
    return asByteSource(path, options).asCharSource(charset);
  }

  /**
   * Returns a view of the given {@code path} as a {@link CharSink} using the given {@code charset}.
   *
   * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
   * and may affect the behavior of the returned sink and the streams it provides. See {@link
   * StandardOpenOption} for the standard options that may be provided. Providing no options is
   * equivalent to providing the {@link StandardOpenOption#CREATE CREATE}, {@link
   * StandardOpenOption#TRUNCATE_EXISTING TRUNCATE_EXISTING} and {@link StandardOpenOption#WRITE
   * WRITE} options.
   */
  public static CharSink asCharSink(Path path, Charset charset, OpenOption... options) {
    return asByteSink(path, options).asCharSink(charset);
  }

  /**
   * Returns an immutable list of paths to the files contained in the given directory.
   *
   * @throws NoSuchFileException if the file does not exist <i>(optional specific exception)</i>
   * @throws NotDirectoryException if the file could not be opened because it is not a directory
   *     <i>(optional specific exception)</i>
   * @throws IOException if an I/O error occurs
   */
  public static ImmutableList<Path> listFiles(Path dir) throws IOException {
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
      return ImmutableList.copyOf(stream);
    } catch (DirectoryIteratorException e) {
      // Unwrap: iteration failures surface as DirectoryIteratorException wrapping the
      // original IOException.
      throw e.getCause();
    }
  }

  /**
   * Returns a {@link Traverser} instance for the file and directory tree. The returned traverser
   * starts from a {@link Path} and will return all files and directories it encounters.
   *
   * <p>The returned traverser attempts to avoid following symbolic links to directories. However,
   * the traverser cannot guarantee that it will not follow symbolic links to directories as it is
   * possible for a directory to be replaced with a symbolic link between checking if the file is a
   * directory and actually reading the contents of that directory.
   *
   * <p>If the {@link Path} passed to one of the traversal methods does not exist or is not a
   * directory, no exception will be thrown and the returned {@link Iterable} will contain a single
   * element: that path.
   *
   * <p>{@link DirectoryIteratorException} may be thrown when iterating {@link Iterable} instances
   * created by this traverser if an {@link IOException} is thrown by a call to {@link
   * #listFiles(Path)}.
   *
   * <p>Example: {@code MoreFiles.fileTraverser().depthFirstPreOrder(Paths.get("/"))} may return the
   * following paths: {@code ["/", "/etc", "/etc/config.txt", "/etc/fonts", "/home", "/home/alice",
   * ...]}
   *
   * @since 23.5
   */
  public static Traverser<Path> fileTraverser() {
    return Traverser.forTree(MoreFiles::fileTreeChildren);
  }

  // Children function for fileTraverser(): a non-directory (or symlink, due to
  // NOFOLLOW_LINKS) has no children.
  private static Iterable<Path> fileTreeChildren(Path dir) {
    if (Files.isDirectory(dir, NOFOLLOW_LINKS)) {
      try {
        return listFiles(dir);
      } catch (IOException e) {
        // the exception thrown when iterating a DirectoryStream if an I/O exception occurs
        throw new DirectoryIteratorException(e);
      }
    }
    return ImmutableList.of();
  }

  /**
   * Returns a predicate that returns the result of {@link java.nio.file.Files#isDirectory(Path,
   * LinkOption...)} on input paths with the given link options.
   */
  public static Predicate<Path> isDirectory(LinkOption... options) {
    LinkOption[] optionsCopy = options.clone(); // defensive copy; caller may mutate its array
    return new Predicate<Path>() {
      @Override
      public boolean apply(Path input) {
        return Files.isDirectory(input, optionsCopy);
      }

      @Override
      public String toString() {
        return "MoreFiles.isDirectory(" + Arrays.toString(optionsCopy) + ")";
      }
    };
  }

  /** Returns whether or not the file with the given name in the given dir is a directory. */
  private static boolean isDirectory(
      SecureDirectoryStream<Path> dir, Path name, LinkOption... options) throws IOException {
    return dir.getFileAttributeView(name, BasicFileAttributeView.class, options)
        .readAttributes()
        .isDirectory();
  }

  /**
   * Returns a predicate that returns the result of {@link java.nio.file.Files#isRegularFile(Path,
   * LinkOption...)} on input paths with the given link options.
   */
  public static Predicate<Path> isRegularFile(LinkOption... options) {
    LinkOption[] optionsCopy = options.clone(); // defensive copy; caller may mutate its array
    return new Predicate<Path>() {
      @Override
      public boolean apply(Path input) {
        return Files.isRegularFile(input, optionsCopy);
      }

      @Override
      public String toString() {
        return "MoreFiles.isRegularFile(" + Arrays.toString(optionsCopy) + ")";
      }
    };
  }

  /**
   * Returns true if the files located by the given paths exist, are not directories, and contain
   * the same bytes.
   *
   * @throws IOException if an I/O error occurs
   * @since 22.0
   */
  public static boolean equal(Path path1, Path path2) throws IOException {
    checkNotNull(path1);
    checkNotNull(path2);
    if (Files.isSameFile(path1, path2)) {
      return true;
    }

    /*
     * Some operating systems may return zero as the length for files denoting system-dependent
     * entities such as devices or pipes, in which case we must fall back on comparing the bytes
     * directly.
*/
    ByteSource source1 = asByteSource(path1);
    ByteSource source2 = asByteSource(path2);
    long len1 = source1.sizeIfKnown().or(0L);
    long len2 = source2.sizeIfKnown().or(0L);
    // Only trust a length mismatch when both lengths are nonzero (see comment above).
    if (len1 != 0 && len2 != 0 && len1 != len2) {
      return false;
    }
    return source1.contentEquals(source2);
  }

  /**
   * Like the unix command of the same name, creates an empty file or updates the last modified
   * timestamp of the existing file at the given path to the current system time.
   */
  @SuppressWarnings("GoodTime") // reading system time without TimeSource
  public static void touch(Path path) throws IOException {
    checkNotNull(path);

    try {
      Files.setLastModifiedTime(path, FileTime.fromMillis(System.currentTimeMillis()));
    } catch (NoSuchFileException e) {
      try {
        Files.createFile(path);
      } catch (FileAlreadyExistsException ignore) {
        // The file didn't exist when we called setLastModifiedTime, but it did when we called
        // createFile, so something else created the file in between. The end result is
        // what we wanted: a new file that probably has its last modified time set to approximately
        // now. Or it could have an arbitrary last modified time set by the creator, but that's no
        // different than if another process set its last modified time to something else after we
        // created it here.
      }
    }
  }

  /**
   * Creates any necessary but nonexistent parent directories of the specified path. Note that if
   * this operation fails, it may have succeeded in creating some (but not all) of the necessary
   * parent directories. The parent directory is created with the given {@code attrs}.
   *
   * @throws IOException if an I/O error occurs, or if any necessary but nonexistent parent
   *     directories of the specified file could not be created.
   */
  public static void createParentDirectories(Path path, FileAttribute<?>... attrs)
      throws IOException {
    // Interestingly, unlike File.getCanonicalFile(), Path/Files provides no way of getting the
    // canonical (absolute, normalized, symlinks resolved, etc.) form of a path to a nonexistent
    // file. getCanonicalFile() can at least get the canonical form of the part of the path which
    // actually exists and then append the normalized remainder of the path to that.
    Path normalizedAbsolutePath = path.toAbsolutePath().normalize();
    Path parent = normalizedAbsolutePath.getParent();
    if (parent == null) {
      // The given directory is a filesystem root. All zero of its ancestors exist. This doesn't
      // mean that the root itself exists -- consider x:\ on a Windows machine without such a
      // drive -- or even that the caller can create it, but this method makes no such guarantees
      // even for non-root files.
      return;
    }

    // Check if the parent is a directory first because createDirectories will fail if the parent
    // exists and is a symlink to a directory... we'd like for this to succeed in that case.
    // (I'm kind of surprised that createDirectories would fail in that case; doesn't seem like
    // what you'd want to happen.)
    if (!Files.isDirectory(parent)) {
      Files.createDirectories(parent, attrs);
      if (!Files.isDirectory(parent)) {
        throw new IOException("Unable to create parent directories of " + path);
      }
    }
  }

  /**
   * Returns the <a href="http://en.wikipedia.org/wiki/Filename_extension">file extension</a> for
   * the file at the given path, or the empty string if the file has no extension. The result does
   * not include the '{@code .}'.
   *
   * <p><b>Note:</b> This method simply returns everything after the last '{@code .}' in the file's
   * name as determined by {@link Path#getFileName}. It does not account for any filesystem-specific
   * behavior that the {@link Path} API does not already account for. For example, on NTFS it will
   * report {@code "txt"} as the extension for the filename {@code "foo.exe:.txt"} even though NTFS
   * will drop the {@code ":.txt"} part of the name when the file is actually created on the
   * filesystem due to NTFS's <a
   * href="https://learn.microsoft.com/en-us/archive/blogs/askcore/alternate-data-streams-in-ntfs">Alternate
   * Data Streams</a>.
*/
  public static String getFileExtension(Path path) {
    Path name = path.getFileName();

    // null for empty paths and root-only paths
    if (name == null) {
      return "";
    }

    String fileName = name.toString();
    int dotIndex = fileName.lastIndexOf('.');
    return dotIndex == -1 ? "" : fileName.substring(dotIndex + 1);
  }

  /**
   * Returns the file name without its <a
   * href="http://en.wikipedia.org/wiki/Filename_extension">file extension</a> or path. This is
   * similar to the {@code basename} unix command. The result does not include the '{@code .}'.
   */
  public static String getNameWithoutExtension(Path path) {
    Path name = path.getFileName();

    // null for empty paths and root-only paths
    if (name == null) {
      return "";
    }

    String fileName = name.toString();
    int dotIndex = fileName.lastIndexOf('.');
    return dotIndex == -1 ? fileName : fileName.substring(0, dotIndex);
  }

  /**
   * Deletes the file or directory at the given {@code path} recursively. Deletes symbolic links,
   * not their targets (subject to the caveat below).
   *
   * <p>If an I/O exception occurs attempting to read, open or delete any file under the given
   * directory, this method skips that file and continues. All such exceptions are collected and,
   * after attempting to delete all files, an {@code IOException} is thrown containing those
   * exceptions as {@linkplain Throwable#getSuppressed() suppressed exceptions}.
   *
   * <h2>Warning: Security of recursive deletes</h2>
   *
   * <p>On a file system that supports symbolic links and does <i>not</i> support {@link
   * SecureDirectoryStream}, it is possible for a recursive delete to delete files and directories
   * that are <i>outside</i> the directory being deleted. This can happen if, after checking that a
   * file is a directory (and not a symbolic link), that directory is replaced by a symbolic link to
   * an outside directory before the call that opens the directory to read its entries.
   *
   * <p>By default, this method throws {@link InsecureRecursiveDeleteException} if it can't
   * guarantee the security of recursive deletes. If you wish to allow the recursive deletes anyway,
   * pass {@link RecursiveDeleteOption#ALLOW_INSECURE} to this method to override that behavior.
   *
   * @throws NoSuchFileException if {@code path} does not exist <i>(optional specific exception)</i>
   * @throws InsecureRecursiveDeleteException if the security of recursive deletes can't be
   *     guaranteed for the file system and {@link RecursiveDeleteOption#ALLOW_INSECURE} was not
   *     specified
   * @throws IOException if {@code path} or any file in the subtree rooted at it can't be deleted
   *     for any reason
   */
  public static void deleteRecursively(Path path, RecursiveDeleteOption... options)
      throws IOException {
    Path parentPath = getParentPath(path);
    if (parentPath == null) {
      throw new FileSystemException(path.toString(), null, "can't delete recursively");
    }

    Collection<IOException> exceptions = null; // created lazily if needed
    try {
      boolean sdsSupported = false;
      // Open the PARENT directory: if the stream is secure, we can delete the target
      // by name relative to it, immune to symlink swaps of the target itself.
      try (DirectoryStream<Path> parent = Files.newDirectoryStream(parentPath)) {
        if (parent instanceof SecureDirectoryStream) {
          sdsSupported = true;
          exceptions =
              deleteRecursivelySecure(
                  (SecureDirectoryStream<Path>) parent,
                  /*
                   * requireNonNull is safe because paths have file names when they have parents,
                   * and we checked for a parent at the beginning of the method.
                   */
                  requireNonNull(path.getFileName()));
        }
      }

      if (!sdsSupported) {
        // Fall back to the insecure walk only if the caller explicitly allowed it.
        checkAllowsInsecure(path, options);
        exceptions = deleteRecursivelyInsecure(path);
      }
    } catch (IOException e) {
      if (exceptions == null) {
        throw e;
      } else {
        exceptions.add(e);
      }
    }

    if (exceptions != null) {
      throwDeleteFailed(path, exceptions);
    }
  }

  /**
   * Deletes all files within the directory at the given {@code path} {@linkplain #deleteRecursively
   * recursively}. Does not delete the directory itself. Deletes symbolic links, not their targets
   * (subject to the caveat below). If {@code path} itself is a symbolic link to a directory, that
   * link is followed and the contents of the directory it targets are deleted.
   *
   * <p>If an I/O exception occurs attempting to read, open or delete any file under the given
   * directory, this method skips that file and continues. All such exceptions are collected and,
   * after attempting to delete all files, an {@code IOException} is thrown containing those
   * exceptions as {@linkplain Throwable#getSuppressed() suppressed exceptions}.
   *
   * <h2>Warning: Security of recursive deletes</h2>
   *
   * <p>On a file system that supports symbolic links and does <i>not</i> support {@link
   * SecureDirectoryStream}, it is possible for a recursive delete to delete files and directories
   * that are <i>outside</i> the directory being deleted. This can happen if, after checking that a
   * file is a directory (and not a symbolic link), that directory is replaced by a symbolic link to
   * an outside directory before the call that opens the directory to read its entries.
   *
   * <p>By default, this method throws {@link InsecureRecursiveDeleteException} if it can't
   * guarantee the security of recursive deletes. If you wish to allow the recursive deletes anyway,
   * pass {@link RecursiveDeleteOption#ALLOW_INSECURE} to this method to override that behavior.
   *
   * @throws NoSuchFileException if {@code path} does not exist <i>(optional specific exception)</i>
   * @throws NotDirectoryException if the file at {@code path} is not a directory <i>(optional
   *     specific exception)</i>
   * @throws InsecureRecursiveDeleteException if the security of recursive deletes can't be
   *     guaranteed for the file system and {@link RecursiveDeleteOption#ALLOW_INSECURE} was not
   *     specified
   * @throws IOException if one or more files can't be deleted for any reason
   */
  public static void deleteDirectoryContents(Path path, RecursiveDeleteOption...
options) throws IOException {
    Collection<IOException> exceptions = null; // created lazily if needed
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
      if (stream instanceof SecureDirectoryStream) {
        SecureDirectoryStream<Path> sds = (SecureDirectoryStream<Path>) stream;
        exceptions = deleteDirectoryContentsSecure(sds);
      } else {
        // Fall back to the insecure walk only if the caller explicitly allowed it.
        checkAllowsInsecure(path, options);
        exceptions = deleteDirectoryContentsInsecure(stream);
      }
    } catch (IOException e) {
      if (exceptions == null) {
        throw e;
      } else {
        exceptions.add(e);
      }
    }

    if (exceptions != null) {
      throwDeleteFailed(path, exceptions);
    }
  }

  /**
   * Secure recursive delete using {@code SecureDirectoryStream}. Returns a collection of exceptions
   * that occurred or null if no exceptions were thrown.
   */
  private static @Nullable Collection<IOException> deleteRecursivelySecure(
      SecureDirectoryStream<Path> dir, Path path) {
    Collection<IOException> exceptions = null;
    try {
      if (isDirectory(dir, path, NOFOLLOW_LINKS)) {
        try (SecureDirectoryStream<Path> childDir = dir.newDirectoryStream(path, NOFOLLOW_LINKS)) {
          exceptions = deleteDirectoryContentsSecure(childDir);
        }

        // If exceptions is not null, something went wrong trying to delete the contents of the
        // directory, so we shouldn't try to delete the directory as it will probably fail.
        if (exceptions == null) {
          dir.deleteDirectory(path);
        }
      } else {
        dir.deleteFile(path);
      }

      return exceptions;
    } catch (IOException e) {
      return addException(exceptions, e);
    }
  }

  /**
   * Secure method for deleting the contents of a directory using {@code SecureDirectoryStream}.
   * Returns a collection of exceptions that occurred or null if no exceptions were thrown.
   */
  private static @Nullable Collection<IOException> deleteDirectoryContentsSecure(
      SecureDirectoryStream<Path> dir) {
    Collection<IOException> exceptions = null;
    try {
      for (Path path : dir) {
        exceptions = concat(exceptions, deleteRecursivelySecure(dir, path.getFileName()));
      }

      return exceptions;
    } catch (DirectoryIteratorException e) {
      return addException(exceptions, e.getCause());
    }
  }

  /**
   * Insecure recursive delete for file systems that don't support {@code SecureDirectoryStream}.
   * Returns a collection of exceptions that occurred or null if no exceptions were thrown.
   */
  private static @Nullable Collection<IOException> deleteRecursivelyInsecure(Path path) {
    Collection<IOException> exceptions = null;
    try {
      if (Files.isDirectory(path, NOFOLLOW_LINKS)) {
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
          exceptions = deleteDirectoryContentsInsecure(stream);
        }
      }

      // If exceptions is not null, something went wrong trying to delete the contents of the
      // directory, so we shouldn't try to delete the directory as it will probably fail.
      if (exceptions == null) {
        Files.delete(path);
      }

      return exceptions;
    } catch (IOException e) {
      return addException(exceptions, e);
    }
  }

  /**
   * Simple, insecure method for deleting the contents of a directory for file systems that don't
   * support {@code SecureDirectoryStream}. Returns a collection of exceptions that occurred or null
   * if no exceptions were thrown.
   */
  private static @Nullable Collection<IOException> deleteDirectoryContentsInsecure(
      DirectoryStream<Path> dir) {
    Collection<IOException> exceptions = null;
    try {
      for (Path entry : dir) {
        exceptions = concat(exceptions, deleteRecursivelyInsecure(entry));
      }

      return exceptions;
    } catch (DirectoryIteratorException e) {
      return addException(exceptions, e.getCause());
    }
  }

  /**
   * Returns a path to the parent directory of the given path. If the path actually has a parent
   * path, this is simple. Otherwise, we need to do some trickier things. Returns null if the path
   * is a root or is the empty path.
   */
  private static @Nullable Path getParentPath(Path path) {
    Path parent = path.getParent();

    // Paths that have a parent:
    if (parent != null) {
      // "/foo" ("/")
      // "foo/bar" ("foo")
      // "C:\foo" ("C:\")
      // "\foo" ("\" - current drive for process on Windows)
      // "C:foo" ("C:" - working dir of drive C on Windows)
      return parent;
    }

    // Paths that don't have a parent:
    if (path.getNameCount() == 0) {
      // "/", "C:\", "\" (no parent)
      // "" (undefined, though typically parent of working dir)
      // "C:" (parent of working dir of drive C on Windows)
      //
      // For working dir paths ("" and "C:"), return null because:
      //   A) it's not specified that "" is the path to the working directory.
      //   B) if we're getting this path for recursive delete, it's typically not possible to
      //      delete the working dir with a relative path anyway, so it's ok to fail.
      //   C) if we're getting it for opening a new SecureDirectoryStream, there's no need to get
      //      the parent path anyway since we can safely open a DirectoryStream to the path without
      //      worrying about a symlink.
      return null;
    } else {
      // "foo" (working dir)
      return path.getFileSystem().getPath(".");
    }
  }

  /** Checks that the given options allow an insecure delete, throwing an exception if not. */
  private static void checkAllowsInsecure(Path path, RecursiveDeleteOption[] options)
      throws InsecureRecursiveDeleteException {
    if (!Arrays.asList(options).contains(RecursiveDeleteOption.ALLOW_INSECURE)) {
      throw new InsecureRecursiveDeleteException(path.toString());
    }
  }

  /**
   * Adds the given exception to the given collection, creating the collection if it's null. Returns
   * the collection.
   */
  private static Collection<IOException> addException(
      @Nullable Collection<IOException> exceptions, IOException e) {
    if (exceptions == null) {
      exceptions = new ArrayList<>(); // don't need Set semantics
    }
    exceptions.add(e);
    return exceptions;
  }

  /**
   * Concatenates the contents of the two given collections of exceptions. If either collection is
   * null, the other collection is returned. Otherwise, the elements of {@code other} are added to
   * {@code exceptions} and {@code exceptions} is returned.
   */
  private static @Nullable Collection<IOException> concat(
      @Nullable Collection<IOException> exceptions, @Nullable Collection<IOException> other) {
    if (exceptions == null) {
      return other;
    } else if (other != null) {
      exceptions.addAll(other);
    }
    return exceptions;
  }

  /**
   * Throws an exception indicating that one or more files couldn't be deleted when deleting {@code
   * path} or its contents.
   *
   * <p>If there is only one exception in the collection, and it is a {@link NoSuchFileException}
   * thrown because {@code path} itself didn't exist, then throws that exception. Otherwise, the
   * thrown exception contains all the exceptions in the given collection as suppressed exceptions.
   */
  private static void throwDeleteFailed(Path path, Collection<IOException> exceptions)
      throws FileSystemException {
    NoSuchFileException pathNotFound = pathNotFound(path, exceptions);
    if (pathNotFound != null) {
      throw pathNotFound;
    }
    // TODO(cgdecker): Should there be a custom exception type for this?
    // Also, should we try to include the Path of each file we may have failed to delete rather
    // than just the exceptions that occurred?
FileSystemException deleteFailed = new FileSystemException( path.toString(), null, "failed to delete one or more files; see suppressed exceptions for details"); for (IOException e : exceptions) { deleteFailed.addSuppressed(e); } throw deleteFailed; } private static @Nullable NoSuchFileException pathNotFound( Path path, Collection<IOException> exceptions) { if (exceptions.size() != 1) { return null; } IOException exception = getOnlyElement(exceptions); if (!(exception instanceof NoSuchFileException)) { return null; } NoSuchFileException noSuchFileException = (NoSuchFileException) exception; String exceptionFile = noSuchFileException.getFile(); if (exceptionFile == null) { /* * It's not clear whether this happens in practice, especially with the filesystem * implementations that are built into java.nio. */ return null; } Path parentPath = getParentPath(path); if (parentPath == null) { /* * This is probably impossible: * * - In deleteRecursively, we require the path argument to have a parent. * * - In deleteDirectoryContents, the path argument may have no parent. Fortunately, all the * *other* paths we process will be descendants of that. That leaves only the original path * argument for us to consider. And the only place we call pathNotFound is from * throwDeleteFailed, and the other place that we call throwDeleteFailed inside * deleteDirectoryContents is when an exception is thrown during the recursive steps. Any * failure during the initial lookup of the path argument itself is rethrown directly. So * any exception that we're seeing here is from a descendant, which naturally has a parent. * I think. * * Still, if this can happen somehow (a weird filesystem implementation that lets callers * change its working directly concurrently with a call to deleteDirectoryContents?), it makes * more sense for us to fall back to a generic FileSystemException (by returning null here) * than to dereference parentPath and end up producing NullPointerException. 
*/ return null; } // requireNonNull is safe because paths have file names when they have parents. Path pathResolvedFromParent = parentPath.resolve(requireNonNull(path.getFileName())); if (exceptionFile.equals(pathResolvedFromParent.toString())) { return noSuchFileException; } return null; } }
google/j2objc
34,741
translator/src/main/java/com/google/devtools/j2objc/ast/DebugASTPrinter.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.j2objc.ast; import com.google.devtools.j2objc.gen.JavadocGenerator; import com.google.devtools.j2objc.gen.SourceBuilder; import com.google.devtools.j2objc.util.ElementUtil; import com.google.devtools.j2objc.util.TypeUtil; import com.google.devtools.j2objc.util.UnicodeUtils; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.TypeParameterElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.TypeMirror; /** * Simple AST printer, suitable for node toString() results. This printer is based on * org.eclipse.jdt.internal.core.dom.NaiveASTFlattener. * * @author Tom Ball */ @SuppressWarnings("UngroupedOverloads") public class DebugASTPrinter extends TreeVisitor { protected SourceBuilder sb = new SourceBuilder(false); private boolean inIfStatement = false; public static String toString(TreeNode node) { // Uncomment to debug print failures. 
// try { DebugASTPrinter printer = new DebugASTPrinter(); node.accept(printer); return printer.sb.toString(); // } catch (Throwable t) { // System.err.println("toString(" + node.getClass().getSimpleName() + ") failure"); // t.printStackTrace(); // throw t; // } } @Override public boolean visit(AnnotationTypeDeclaration node) { sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); sb.print("@interface "); node.getName().accept(this); sb.println(" {"); sb.indent(); for (BodyDeclaration decl : node.getBodyDeclarations()) { decl.accept(this); } sb.unindent(); sb.println("}"); return false; } @Override public boolean visit(AnnotationTypeMemberDeclaration node) { sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); sb.print(node.getTypeMirror().toString()); sb.print(' '); sb.print(ElementUtil.getName(node.getExecutableElement())); sb.print("()"); if (node.getDefault() != null) { sb.print(" default "); node.getDefault().accept(this); } sb.println(';'); return false; } @Override public boolean visit(ArrayAccess node) { node.getArray().accept(this); sb.print('['); node.getIndex().accept(this); sb.print(']'); return false; } @Override public boolean visit(ArrayCreation node) { Type componentType = node.getType().getComponentType(); int emptyDims = 1; while (componentType.getKind() == TreeNode.Kind.ARRAY_TYPE) { componentType = ((ArrayType) componentType).getComponentType(); emptyDims++; } emptyDims -= node.getDimensions().size(); sb.print("new "); componentType.accept(this); for (Expression dim : node.getDimensions()) { sb.print('['); dim.accept(this); sb.print(']'); } for (int i = 0; i < emptyDims; i++) { sb.print("[]"); } if (node.getInitializer() != null) { node.getInitializer().accept(this); } return false; } @Override public boolean visit(ArrayInitializer node) { sb.print('{'); Iterator<Expression> iter = node.getExpressions().iterator(); while (iter.hasNext()) { Expression expr = 
iter.next(); expr.accept(this); if (iter.hasNext()) { sb.print(','); } } sb.print('}'); return false; } @Override public boolean visit(ArrayType node) { node.getComponentType().accept(this); sb.print("[]"); return false; } @Override public boolean visit(AssertStatement node) { sb.printIndent(); sb.print("assert "); node.getExpression().accept(this); if (node.getMessage() != null) { sb.print(" : "); node.getMessage().accept(this); } sb.println(';'); return false; } @Override public boolean visit(Assignment node) { node.getLeftHandSide().accept(this); sb.print(node.getOperator().toString()); node.getRightHandSide().accept(this); return false; } @Override public boolean visit(Block node) { sb.println('{'); sb.indent(); for (Statement stmt : node.getStatements()) { stmt.accept(this); } sb.unindent(); sb.printIndent(); sb.println('}'); return false; } @Override public boolean visit(BooleanLiteral node) { sb.print(node.booleanValue() ? "true" : "false"); return false; } @Override public boolean visit(BreakStatement node) { sb.printIndent(); sb.print("break"); if (node.getLabel() != null) { sb.print(' '); node.getLabel().accept(this); } sb.println(';'); return false; } @Override public boolean visit(CastExpression node) { sb.print('('); node.getType().accept(this); sb.print(')'); node.getExpression().accept(this); return false; } @Override public boolean visit(CatchClause node) { sb.print("catch ("); node.getException().accept(this); sb.print(") "); node.getBody().accept(this); return false; } @Override public boolean visit(CharacterLiteral node) { sb.print(UnicodeUtils.escapeCharLiteral(node.charValue())); return false; } @Override public boolean visit(ClassInstanceCreation node) { if (node.getExpression() != null) { node.getExpression().accept(this); sb.print('.'); } sb.print("new "); printTypeParameters(node.getExecutableElement().getTypeParameters()); node.getType().accept(this); sb.print("("); for (Iterator<Expression> it = node.getArguments().iterator(); 
it.hasNext(); ) { Expression e = it.next(); e.accept(this); if (it.hasNext()) { sb.print(','); } } sb.print(')'); if (node.getAnonymousClassDeclaration() != null) { node.getAnonymousClassDeclaration().accept(this); } return false; } @Override public boolean visit(CommaExpression node) { sb.print('('); for (Iterator<Expression> it = node.getExpressions().iterator(); it.hasNext(); ) { Expression e = it.next(); e.accept(this); if (it.hasNext()) { sb.print(", "); } } sb.print(')'); return false; } @Override public boolean visit(CompilationUnit node) { if (!node.getPackage().isDefaultPackage()) { node.getPackage().accept(this); } for (Iterator<AbstractTypeDeclaration> it = node.getTypes().iterator(); it.hasNext(); ) { it.next().accept(this); } return false; } @Override public boolean visit(ConditionalExpression node) { node.getExpression().accept(this); sb.print(" ? "); node.getThenExpression().accept(this); sb.print(" : "); node.getElseExpression().accept(this); return false; } @Override public boolean visit(ConstructorInvocation node) { sb.printIndent(); printTypeParameters(node.getExecutableElement().getTypeParameters()); sb.print("this("); for (Iterator<Expression> it = node.getArguments().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(','); } } sb.println(");"); return false; } @Override public boolean visit(ContinueStatement node) { sb.printIndent(); sb.print("continue"); if (node.getLabel() != null) { sb.print(' '); node.getLabel().accept(this); } sb.println(';'); return false; } @Override public boolean visit(CreationReference node) { node.getType().accept(this); sb.print("::new"); return false; } @Override public boolean visit(CStringLiteral node) { sb.print(node.getLiteralValue()); return false; } @Override public boolean visit(DoStatement node) { sb.printIndent(); sb.print("do "); node.getBody().accept(this); sb.printIndent(); sb.print("while ("); node.getExpression().accept(this); sb.println(");"); return false; } 
@Override public boolean visit(EmptyStatement node) { sb.printIndent(); sb.println(';'); return false; } @Override public boolean visit(EnhancedForStatement node) { sb.printIndent(); sb.print("for ("); node.getParameter().accept(this); sb.print(" : "); node.getExpression().accept(this); sb.print(") "); node.getBody().accept(this); return false; } @Override public boolean visit(EnumConstantDeclaration node) { sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); sb.print(ElementUtil.getName(node.getVariableElement())); if (!node.getArguments().isEmpty()) { sb.print('('); for (Iterator<Expression> it = node.getArguments().iterator(); it.hasNext(); ) { Expression e = (Expression) it.next(); e.accept(this); if (it.hasNext()) { sb.print(','); } } sb.print(')'); } if (node.getAnonymousClassDeclaration() != null) { node.getAnonymousClassDeclaration().accept(this); } return false; } @Override public boolean visit(EnumDeclaration node) { sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); sb.print("enum "); node.getName().accept(this); sb.print(' '); sb.print('{'); for (Iterator<EnumConstantDeclaration> it = node.getEnumConstants().iterator(); it.hasNext(); ) { EnumConstantDeclaration d = (EnumConstantDeclaration) it.next(); d.accept(this); if (it.hasNext()) { sb.print(", "); } } if (!node.getBodyDeclarations().isEmpty()) { sb.print("; "); for (Iterator<BodyDeclaration> it = node.getBodyDeclarations().iterator(); it.hasNext(); ) { it.next().accept(this); } } printStaticBlock(node); sb.println('}'); return false; } @Override public boolean visit(ExpressionMethodReference node) { node.getExpression().accept(this); sb.print("::"); sb.print(ElementUtil.getName(node.getExecutableElement())); return false; } @Override public boolean visit(ExpressionStatement node) { sb.printIndent(); node.getExpression().accept(this); sb.println(';'); return false; } @Override public boolean visit(FieldAccess 
node) { node.getExpression().accept(this); sb.print('.'); node.getName().accept(this); return false; } @Override public boolean visit(FieldDeclaration node) { if (node.getJavadoc() != null) { node.getJavadoc().accept(this); } sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); sb.print(node.getTypeMirror().toString()); sb.print(' '); node.getFragment().accept(this); sb.println(';'); return false; } @Override public boolean visit(ForStatement node) { sb.printIndent(); sb.print("for ("); for (Iterator<Expression> it = node.getInitializers().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(", "); } } sb.print("; "); if (node.getExpression() != null) { node.getExpression().accept(this); } sb.print("; "); for (Iterator<Expression> it = node.getUpdaters().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(", "); } } sb.print(") "); node.getBody().accept(this); return false; } @Override public boolean visit(FunctionDeclaration node) { sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); node.getReturnType().accept(this); sb.print(' '); sb.print(node.getName()); sb.print('('); for (Iterator<SingleVariableDeclaration> it = node.getParameters().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(','); } } sb.print(')'); if (node.getBody() == null) { sb.print(';'); } else { node.getBody().accept(this); } return false; } @Override public boolean visit(FunctionInvocation node) { sb.append(node.getName()); sb.append('('); for (Iterator<Expression> iter = node.getArguments().iterator(); iter.hasNext(); ) { iter.next().accept(this); if (iter.hasNext()) { sb.append(", "); } } sb.append(')'); return false; } @Override public boolean visit(IfStatement node) { if (!inIfStatement) { sb.printIndent(); } boolean wasInStatement = inIfStatement; inIfStatement = true; sb.print("if ("); 
node.getExpression().accept(this); sb.print(") "); node.getThenStatement().accept(this); if (node.getElseStatement() != null) { sb.printIndent(); sb.print("else "); node.getElseStatement().accept(this); } inIfStatement = wasInStatement; return false; } @Override public boolean visit(InfixExpression node) { boolean isFirst = true; String op = ' ' + node.getOperator().toString() + ' '; for (Expression operand : node.getOperands()) { if (!isFirst) { sb.print(op); } isFirst = false; operand.accept(this); } return false; } @Override public boolean visit(Initializer node) { printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); node.getBody().accept(this); return false; } @Override public boolean visit(InstanceofExpression node) { node.getLeftOperand().accept(this); sb.print(" instanceof "); node.getRightOperand().accept(this); Pattern pattern = node.getPattern(); if (pattern != null && pattern.getKind() == TreeNode.Kind.BINDING_PATTERN) { sb.print(" "); var unused = visit(((Pattern.BindingPattern) pattern).getVariable()); } return false; } @Override public boolean visit(IntersectionType node) { sb.print('('); boolean delimiterFlag = false; for (Type t : node.types()) { if (delimiterFlag) { sb.print(" & "); } else { delimiterFlag = true; } t.accept(this); } sb.print(')'); return false; } @Override public boolean visit(Javadoc node) { sb.println(JavadocGenerator.toString(node)); return false; } @Override public boolean visit(LabeledStatement node) { sb.printIndent(); node.getLabel().accept(this); sb.print(": "); node.getBody().accept(this); return false; } @Override public boolean visit(LambdaExpression node) { sb.print("("); boolean delimiterFlag = false; for (VariableDeclaration x : node.getParameters()) { VariableElement variableElement = x.getVariableElement(); if (delimiterFlag) { sb.print(", "); } else { delimiterFlag = true; } sb.print(variableElement.asType().toString()); sb.print(" "); sb.print(variableElement.getSimpleName().toString()); } 
sb.print(") -> "); node.getBody().accept(this); return false; } @Override public boolean visit(MarkerAnnotation node) { sb.print('@'); node.getTypeName().accept(this); return false; } @Override public boolean visit(MemberValuePair node) { node.getName().accept(this); sb.print('='); node.getValue().accept(this); return false; } protected void printMethodBody(MethodDeclaration node) { if (node.getBody() == null) { sb.println(';'); } else { node.getBody().accept(this); } } @Override public boolean visit(MethodDeclaration node) { sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); ExecutableElement meth = node.getExecutableElement(); printTypeParameters(meth.getTypeParameters()); if (!node.isConstructor()) { sb.print(node.getReturnTypeMirror().toString()); sb.print(' '); } sb.print(ElementUtil.getName(meth)); sb.print("("); for (Iterator<SingleVariableDeclaration> it = node.getParameters().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(','); } } sb.print(")"); List<? 
extends TypeMirror> exceptions = meth.getThrownTypes(); if (exceptions.size() > 0) { sb.print(" throws "); for (int i = 0; i < exceptions.size(); ) { sb.print(exceptions.get(i).toString()); if (++i < exceptions.size()){ sb.print(','); } } sb.print(' '); } printMethodBody(node); return false; } @Override public boolean visit(MethodInvocation node) { if (node.getExpression() != null) { node.getExpression().accept(this); sb.print("."); } printTypeParameters(node.getExecutableElement().getTypeParameters()); sb.print(ElementUtil.getName(node.getExecutableElement())); sb.print('('); for (Iterator<Expression> it = node.getArguments().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(','); } } sb.print(')'); return false; } @Override public boolean visit(NativeDeclaration node) { if (node.getImplementationCode() != null) { sb.println(node.getImplementationCode()); } else if (node.getHeaderCode() != null) { sb.println(node.getHeaderCode()); } return false; } @Override public boolean visit(NativeExpression node) { sb.print(node.getCode()); return false; } @Override public boolean visit(NativeStatement node) { sb.printIndent(); sb.println(node.getCode()); return false; } @Override public boolean visit(NormalAnnotation node) { sb.print("@"); node.getTypeName().accept(this); sb.print("("); for (Iterator<MemberValuePair> it = node.getValues().iterator(); it.hasNext(); ) { MemberValuePair p = (MemberValuePair) it.next(); p.accept(this); if (it.hasNext()) { sb.print(','); } } sb.print(")"); return false; } @Override public boolean visit(NullLiteral node) { sb.print("null"); return false; } @Override public boolean visit(NumberLiteral node) { String text = node.getToken(); sb.print(text != null ? 
text : node.getValue().toString()); return false; } @Override public boolean visit(PackageDeclaration node) { printAnnotations(node.getAnnotations()); sb.print("package "); node.getName().accept(this); sb.println(';'); return false; } @Override public boolean visit(ParameterizedType node) { node.getType().accept(this); return false; } @Override public boolean visit(ParenthesizedExpression node) { sb.print('('); node.getExpression().accept(this); sb.print(')'); return false; } @Override public boolean visit(PostfixExpression node) { node.getOperand().accept(this); sb.print(node.getOperator().toString()); return false; } @Override public boolean visit(PrefixExpression node) { sb.print(node.getOperator().toString()); node.getOperand().accept(this); return false; } @Override public boolean visit(PrimitiveType node) { sb.print(node.getTypeMirror().toString()); return false; } @Override public boolean visit(PropertyAnnotation node) { String attributeString = PropertyAnnotation.toAttributeString(node.getPropertyAttributes()); sb.print("@Property(\"" + attributeString + "\")"); return false; } @Override public boolean visit(QualifiedName node) { node.getQualifier().accept(this); sb.print("."); node.getName().accept(this); return false; } @Override public boolean visit(QualifiedType node) { sb.print(node.getTypeMirror().toString()); return false; } @Override public boolean visit(RecordDeclaration node) { sb.printIndent(); printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); sb.print("record "); node.getName().accept(this); sb.print(' '); sb.println('{'); sb.indent(); List<BodyDeclaration> bodyDeclarations = new ArrayList<>(node.getBodyDeclarations()); sort(bodyDeclarations); for (BodyDeclaration bodyDecl : bodyDeclarations) { bodyDecl.accept(this); } printStaticBlock(node); sb.unindent(); sb.printIndent(); sb.println('}'); return false; } @Override public boolean visit(ReturnStatement node) { sb.printIndent(); sb.print("return"); if 
(node.getExpression() != null) { sb.print(' '); node.getExpression().accept(this); } sb.println(';'); return false; } @Override public boolean visit(SimpleName node) { sb.print(node.getIdentifier()); return false; } @Override public boolean visit(SimpleType node) { sb.print(node.getTypeMirror().toString()); return false; } @Override public boolean visit(SingleMemberAnnotation node) { sb.print("@"); node.getTypeName().accept(this); sb.print("("); node.getValue().accept(this); sb.print(")"); return false; } @Override public boolean visit(SingleVariableDeclaration node) { sb.printIndent(); printModifiers(ElementUtil.fromModifierSet(node.getVariableElement().getModifiers())); node.getType().accept(this); if (node.isVarargs()) { sb.print("..."); } sb.print(' '); sb.print(ElementUtil.getName(node.getVariableElement())); for (int i = 0; i < node.getExtraDimensions(); i++) { sb.print("[]"); } if (node.getInitializer() != null) { sb.print("="); node.getInitializer().accept(this); } return false; } @Override public boolean visit(StringLiteral node) { sb.printf("\"%s\"", UnicodeUtils.escapeStringLiteral(node.getLiteralValue())); return false; } @Override public boolean visit(SuperConstructorInvocation node) { sb.printIndent(); if (node.getExpression() != null) { node.getExpression().accept(this); sb.print("."); } printTypeParameters(node.getExecutableElement().getTypeParameters()); sb.print("super("); for (Iterator<Expression> it = node.getArguments().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(','); } } sb.println(");"); return false; } @Override public boolean visit(SuperFieldAccess node) { if (node.getQualifier() != null) { node.getQualifier().accept(this); sb.print("."); } sb.print("super."); sb.print(ElementUtil.getName(node.getVariableElement())); return false; } @Override public boolean visit(SuperMethodInvocation node) { if (node.getQualifier() != null) { node.getQualifier().accept(this); sb.print("."); } sb.print("super."); 
printTypeParameters(node.getExecutableElement().getTypeParameters()); sb.print(ElementUtil.getName(node.getExecutableElement())); sb.print("("); for (Iterator<Expression> it = node.getArguments().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(','); } } sb.print(")"); return false; } @Override public boolean visit(SuperMethodReference node) { if (node.getQualifier() != null) { node.getQualifier().accept(this); sb.print("."); } sb.print("super::"); sb.print(ElementUtil.getName(node.getExecutableElement())); return false; } @Override public boolean visit(SwitchCase node) { sb.printIndent(); if (node.isDefault()) { sb.println("default:"); } else { sb.print("case "); node.getExpression().accept(this); sb.println(":"); } return false; } @Override public boolean visit(SwitchExpressionCase node) { sb.printIndent(); if (node.isDefault()) { sb.print("default: "); } else { sb.print("case "); for (Iterator<Expression> it = node.getExpressions().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(", "); } } if (node.getPattern() != null) { if (node.getPattern() instanceof Pattern.BindingPattern) { SingleVariableDeclaration varDecl = ((Pattern.BindingPattern) node.getPattern()).getVariable(); VariableElement variableElement = varDecl.getVariableElement(); sb.print(variableElement.asType().toString()); sb.print(" "); sb.print(variableElement.getSimpleName().toString()); } } if (node.getGuard() != null) { sb.print(" when "); node.getGuard().accept(this); } sb.print(": "); } TreeNode body = node.getBody(); if (body == null) { sb.newline(); } else { sb.print(body.toString()); } return false; } @Override public boolean visit(SwitchStatement node) { sb.printIndent(); sb.print("switch ("); node.getExpression().accept(this); sb.print(") "); sb.println("{"); sb.indent(); for (Iterator<Statement> it = node.getStatements().iterator(); it.hasNext(); ) { it.next().accept(this); } sb.unindent(); sb.printIndent(); 
sb.println("};"); return false; } @Override public boolean visit(SwitchExpression node) { sb.print("switch ("); node.getExpression().accept(this); sb.print(") "); sb.println("{"); sb.indent(); for (Statement element : node.getStatements()) { element.accept(this); } sb.unindent(); sb.printIndent(); sb.print("}"); return false; } @Override public boolean visit(SynchronizedStatement node) { sb.print("synchronized ("); node.getExpression().accept(this); sb.print(") "); node.getBody().accept(this); return false; } @Override public boolean visit(TagElement node) { sb.print(JavadocGenerator.toString(node)); return false; } @Override public boolean visit(ThisExpression node) { if (node.getQualifier() != null) { node.getQualifier().accept(this); sb.print("."); } sb.print("this"); return false; } @Override public boolean visit(ThrowStatement node) { sb.printIndent(); sb.print("throw "); node.getExpression().accept(this); sb.println(';'); return false; } @Override public boolean visit(TryStatement node) { sb.printIndent(); sb.print("try "); List<TreeNode> resources = node.getResources(); if (!resources.isEmpty()) { sb.print('('); for (Iterator<TreeNode> it = resources.iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(';'); } } sb.print(')'); } node.getBody().accept(this); sb.print(' '); for (Iterator<CatchClause> it = node.getCatchClauses().iterator(); it.hasNext(); ) { it.next().accept(this); } if (node.getFinally() != null) { sb.print(" finally "); node.getFinally().accept(this); } return false; } protected void sort(List<BodyDeclaration> lst) {} @Override public boolean visit(TypeDeclaration node) { if (node.getJavadoc() != null) { node.getJavadoc().accept(this); } printAnnotations(node.getAnnotations()); printModifiers(node.getModifiers()); sb.print(node.isInterface() ? 
"interface " : "class "); if (node.getName() != null) { node.getName().accept(this); printTypeParameters(node.getTypeElement().getTypeParameters()); sb.print(' '); TypeMirror superclassTypeMirror = node.getSuperclassTypeMirror(); if (!(TypeUtil.isNone(superclassTypeMirror) || TypeUtil.isJavaObject(superclassTypeMirror))) { sb.print("extends "); sb.print(superclassTypeMirror.toString()); sb.print(' '); } List<? extends TypeMirror> superInterfaceTypeMirrors = node.getSuperInterfaceTypeMirrors(); if (!superInterfaceTypeMirrors.isEmpty()) { sb.print(node.isInterface() ? "extends " : "implements "); // $NON-NLS-2$ for (Iterator<? extends TypeMirror> it = node.getSuperInterfaceTypeMirrors().iterator(); it.hasNext(); ) { sb.print(it.next().toString()); if (it.hasNext()) { sb.print(", "); } } sb.print(' '); } sb.println('{'); sb.indent(); List<BodyDeclaration> bodyDeclarations = new ArrayList<>(node.getBodyDeclarations()); sort(bodyDeclarations); for (BodyDeclaration bodyDecl : bodyDeclarations) { bodyDecl.accept(this); } printStaticBlock(node); sb.unindent(); sb.printIndent(); sb.println('}'); } else { sb.println("<uninitialized> {}"); } return false; } @Override public boolean visit(TypeLiteral node) { node.getType().accept(this); sb.print(".class"); return false; } @Override public boolean visit(TypeMethodReference node) { node.getType().accept(this); sb.print("::"); if (!node.getTypeArguments().isEmpty()) { sb.print('<'); boolean delimiterFlag = false; for (Type t : node.getTypeArguments()) { if (delimiterFlag) { sb.print(", "); } else { delimiterFlag = true; } t.accept(this); } sb.print('>'); } sb.print(ElementUtil.getName(node.getExecutableElement())); return false; } @Override public boolean visit(UnionType node) { for (Iterator<Type> it = node.getTypes().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print('|'); } } return false; } @Override public boolean visit(VariableDeclarationExpression node) { node.getType().accept(this); 
sb.print(' '); for (Iterator<VariableDeclarationFragment> it = node.getFragments().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(", "); } } return false; } @Override public boolean visit(VariableDeclarationFragment node) { sb.print(ElementUtil.getName(node.getVariableElement())); for (int i = 0; i < node.getExtraDimensions(); i++) { sb.print("[]"); } if (node.getInitializer() != null) { sb.print("="); node.getInitializer().accept(this); } return false; } @Override public boolean visit(VariableDeclarationStatement node) { sb.printIndent(); printModifiers(node.getModifiers()); sb.print(node.getTypeMirror().toString()); sb.print(' '); for (Iterator<VariableDeclarationFragment> it = node.getFragments().iterator(); it.hasNext(); ) { it.next().accept(this); if (it.hasNext()) { sb.print(", "); } } sb.println(';'); return false; } @Override public boolean visit(WhileStatement node) { sb.printIndent(); sb.print("while ("); node.getExpression().accept(this); sb.print(") "); node.getBody().accept(this); return false; } @Override public boolean visit(YieldStatement node) { sb.printIndent(); sb.print("yield "); node.getExpression().accept(this); sb.println(";"); return false; } protected void printAnnotations(List<Annotation> annotations) { Iterator<Annotation> iterator = annotations.iterator(); while (iterator.hasNext()) { iterator.next().accept(this); sb.print(' '); } } public static void printModifiers(int modifiers, StringBuilder builder) { DebugASTPrinter temp = new DebugASTPrinter(); temp.printModifiers(modifiers); builder.append(temp.sb.toString()); } protected void printModifiers(int modifiers) { if (Modifier.isPublic(modifiers)) { sb.print("public "); } if (Modifier.isProtected(modifiers)) { sb.print("protected "); } if (Modifier.isPrivate(modifiers)) { sb.print("private "); } if (Modifier.isStatic(modifiers)) { sb.print("static "); } if (Modifier.isAbstract(modifiers)) { sb.print("abstract "); } if (Modifier.isFinal(modifiers)) { 
sb.print("final "); } if (Modifier.isSynchronized(modifiers)) { sb.print("synchronized "); } if (Modifier.isVolatile(modifiers)) { sb.print("volatile "); } if (Modifier.isNative(modifiers)) { sb.print("native "); } if (Modifier.isStrict(modifiers)) { sb.print("strictfp "); } if (Modifier.isTransient(modifiers)) { sb.print("transient "); } if ((modifiers & ElementUtil.ACC_SYNTHETIC) > 0) { sb.print("synthetic "); } } protected void printTypeParameter(TypeParameterElement element) { sb.print(element.getSimpleName().toString()); Iterator<? extends TypeMirror> boundsList = element.getBounds().iterator(); TypeMirror bound = boundsList.next(); if (!TypeUtil.isJavaObject(bound) || boundsList.hasNext()) { sb.print(" extends "); sb.print(bound.toString()); } while (boundsList.hasNext()) { sb.print(" & "); bound = boundsList.next(); sb.print(bound.toString()); } } protected void printTypeParameters(List<? extends TypeParameterElement> typeParams) { Iterator<? extends TypeParameterElement> it = typeParams.iterator(); if (it.hasNext()) { sb.print('<'); printTypeParameter(it.next()); while (it.hasNext()) { sb.print(','); printTypeParameter(it.next()); } sb.print('>'); } } protected void printStaticBlock(AbstractTypeDeclaration node) { if (!node.getClassInitStatements().isEmpty()) { sb.printIndent(); sb.println("static {"); sb.indent(); for (Statement stmt : node.getClassInitStatements()) { stmt.accept(this); } sb.unindent(); sb.printIndent(); sb.println('}'); } } }
apache/tomcat
34,980
test/org/apache/coyote/http11/filters/TestChunkedInputFilter.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.coyote.http11.filters;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;

import jakarta.servlet.AsyncContext;
import jakarta.servlet.ReadListener;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletInputStream;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;

import org.junit.Assert;
import org.junit.Test;

import static org.apache.catalina.startup.SimpleHttpClient.CRLF;
import org.apache.catalina.Context;
import org.apache.catalina.Wrapper;
import org.apache.catalina.startup.SimpleHttpClient;
import org.apache.catalina.startup.TesterServlet;
import org.apache.catalina.startup.Tomcat;
import org.apache.catalina.startup.TomcatBaseTest;

/**
 * Tests for the HTTP/1.1 chunked transfer-coding input filter. Covers:
 * CRLF vs. bare-LF line terminators on chunk headers, chunk data, trailer
 * headers and the terminating line; trailer header size limits; chunk
 * extension size limits; chunk-size parsing edge cases; invalid trailer
 * header names; incomplete bodies; maxSwallowSize enforcement; and chunks
 * split across multiple network writes with blocking and non-blocking reads.
 */
public class TestChunkedInputFilter extends TomcatBaseTest {

    // Limit used by the chunk extension size tests below.
    private static final int EXT_SIZE_LIMIT = 10;

    @Test
    public void testChunkHeaderCRLF() throws Exception {
        doTestChunkingCRLF(true, true, true, true, true, true);
    }

    @Test
    public void testChunkHeaderLF() throws Exception {
        doTestChunkingCRLF(false, true, true, true, true, false);
    }

    @Test
    public void testChunkCRLF() throws Exception {
        doTestChunkingCRLF(true, true, true, true, true, true);
    }

    @Test
    public void testChunkLF() throws Exception {
        doTestChunkingCRLF(true, false, true, true, true, false);
    }

    @Test
    public void testFirstTrailingHeadersCRLF() throws Exception {
        doTestChunkingCRLF(true, true, true, true, true, true);
    }

    @Test
    public void testFirstTrailingHeadersLF() throws Exception {
        doTestChunkingCRLF(true, true, false, true, true, false);
    }

    @Test
    public void testSecondTrailingHeadersCRLF() throws Exception {
        doTestChunkingCRLF(true, true, true, true, true, true);
    }

    @Test
    public void testSecondTrailingHeadersLF() throws Exception {
        doTestChunkingCRLF(true, true, true, false, true, false);
    }

    @Test
    public void testEndCRLF() throws Exception {
        doTestChunkingCRLF(true, true, true, true, true, true);
    }

    @Test
    public void testEndLF() throws Exception {
        doTestChunkingCRLF(true, true, true, true, false, false);
    }

    /*
     * Sends a chunked POST where each structural line terminator can be either
     * CRLF or a bare LF. Each flag selects the terminator for one location;
     * expectPass indicates whether the request should be accepted (a bare LF
     * anywhere is expected to be rejected).
     */
    private void doTestChunkingCRLF(boolean chunkHeaderUsesCRLF, boolean chunkUsesCRLF, boolean firstheaderUsesCRLF,
            boolean secondheaderUsesCRLF, boolean endUsesCRLF, boolean expectPass) throws Exception {

        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        // Configure allowed trailer headers
        Assert.assertTrue(tomcat.getConnector().setProperty("allowedTrailerHeaders", "x-trailer1,x-trailer2"));

        EchoHeaderServlet servlet = new EchoHeaderServlet(expectPass);
        Tomcat.addServlet(ctx, "servlet", servlet);
        ctx.addServletMappingDecoded("/", "servlet");

        tomcat.start();

        // @formatter:off
        String[] request = new String[] {
                "POST /echo-params.jsp HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "3" + (chunkHeaderUsesCRLF ? CRLF : SimpleHttpClient.LF) +
                "a=0" + (chunkUsesCRLF ? CRLF : SimpleHttpClient.LF) +
                "4" + CRLF +
                "&b=1" + CRLF +
                "0" + CRLF +
                "x-trailer1: Test",
                "Value1" + (firstheaderUsesCRLF ? CRLF : SimpleHttpClient.LF) +
                "x-trailer2: TestValue2" + (secondheaderUsesCRLF ? CRLF : SimpleHttpClient.LF) +
                (endUsesCRLF ? CRLF : SimpleHttpClient.LF) };
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        client.setRequest(request);

        client.connect();
        Exception processException = null;
        try {
            client.processRequest();
        } catch (Exception e) {
            // Socket was probably closed before client had a chance to read
            // response
            processException = e;
        }

        if (expectPass) {
            Assert.assertTrue(client.isResponse200());
            Assert.assertEquals("nullnull7TestValue1TestValue2", client.getResponseBody());
            Assert.assertNull(processException);
            Assert.assertFalse(servlet.getExceptionDuringRead());
        } else {
            if (processException == null) {
                Assert.assertTrue(client.getResponseLine(), client.isResponse500());
            } else {
                // Use fall-back for checking the error occurred
                Assert.assertTrue(servlet.getExceptionDuringRead());
            }
        }
    }

    @Test
    public void testTrailingHeadersSizeLimitBelowLimit() throws Exception {
        doTestTrailingHeadersSizeLimit(17, "x-trailer: Test", false);
    }

    @Test
    public void testTrailingHeadersSizeLimitAtLimit() throws Exception {
        doTestTrailingHeadersSizeLimit(18, "x-trailer: Test", false);
    }

    @Test
    public void testTrailingHeadersSizeLimitAboveLimit() throws Exception {
        doTestTrailingHeadersSizeLimit(19, "x-trailer: Test", true);
    }

    /*
     * This test uses the fact that the header is simply concatenated to insert a pipelined request. The pipelined
     * request should not trigger the trailing header size limit. Note that 19 is just enough for the first request.
     */
    @Test
    public void testTrailingHeadersSizeLimitPipelining() throws Exception {
        // @formatter:off
        doTestTrailingHeadersSizeLimit(19,
                "x-trailer: Test" + CRLF +
                CRLF +
                "POST /echo-params.jsp HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "3" + CRLF +
                "a=0" + CRLF +
                "4" + CRLF +
                "&b=1" + CRLF +
                "0" + CRLF +
                "x-trailer: Test",
                true);
        // @formatter:on
    }

    /*
     * Since limit includes CRLF at end of trailer and final CRLF
     */
    private void doTestTrailingHeadersSizeLimit(int trailerSizeLimit, String trailerHeader, boolean pass)
            throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        Tomcat.addServlet(ctx, "servlet", new EchoHeaderServlet(false));
        ctx.addServletMappingDecoded("/", "servlet");

        // Limit the size of the trailing header
        Assert.assertTrue(tomcat.getConnector().setProperty("maxTrailerSize", Integer.toString(trailerSizeLimit)));
        tomcat.start();

        // @formatter:off
        String[] request = new String[] {
                "POST /echo-params.jsp HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "3" + CRLF +
                "a=0" + CRLF +
                "4" + CRLF +
                "&b=1" + CRLF +
                "0" + CRLF +
                trailerHeader + CRLF +
                CRLF };
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        client.setRequest(request);

        client.connect();
        client.processRequest();
        if (pass) {
            Assert.assertTrue(client.isResponse200());
        } else {
            Assert.assertTrue(client.isResponse500());
        }
    }

    @Test
    public void testExtensionSizeLimitOneBelow() throws Exception {
        doTestExtensionSizeLimit(EXT_SIZE_LIMIT - 1, true);
    }

    @Test
    public void testExtensionSizeLimitExact() throws Exception {
        doTestExtensionSizeLimit(EXT_SIZE_LIMIT, true);
    }

    @Test
    public void testExtensionSizeLimitOneOver() throws Exception {
        doTestExtensionSizeLimit(EXT_SIZE_LIMIT + 1, false);
    }

    /*
     * Sends a single chunk carrying a chunk extension of the given total
     * length and checks acceptance against the connector's maxExtensionSize.
     */
    private void doTestExtensionSizeLimit(int len, boolean ok) throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        Assert.assertTrue(tomcat.getConnector().setProperty(
                "maxExtensionSize", Integer.toString(EXT_SIZE_LIMIT)));

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        Tomcat.addServlet(ctx, "servlet", new EchoHeaderServlet(ok));
        ctx.addServletMappingDecoded("/", "servlet");

        tomcat.start();

        String extName = ";foo=";
        StringBuilder extValue = new StringBuilder(len);
        for (int i = 0; i < (len - extName.length()); i++) {
            extValue.append('x');
        }

        // @formatter:off
        String[] request = new String[] {
                "POST /echo-params.jsp HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "3" + extName + extValue.toString() + CRLF +
                "a=0" + CRLF +
                "4" + CRLF +
                "&b=1" + CRLF +
                "0" + CRLF +
                CRLF };
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        client.setRequest(request);

        client.connect();
        client.processRequest();
        if (ok) {
            Assert.assertTrue(client.isResponse200());
        } else {
            Assert.assertTrue(client.isResponse500());
        }
    }

    @Test
    public void testNoTrailingHeaders() throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        Tomcat.addServlet(ctx, "servlet", new EchoHeaderServlet(true));
        ctx.addServletMappingDecoded("/", "servlet");

        tomcat.start();

        // @formatter:off
        String request =
                "POST /echo-params.jsp HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "3" + CRLF +
                "a=0" + CRLF +
                "4" + CRLF +
                "&b=1" + CRLF +
                "0" + CRLF +
                CRLF;
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        client.setRequest(new String[] { request });

        client.connect();
        client.processRequest();
        Assert.assertEquals("nullnull7nullnull", client.getResponseBody());
    }

    @Test
    public void testChunkSizeZero() throws Exception {
        doTestChunkSize(true, true, "", 10, 0);
    }

    @Test
    public void testChunkSizeAbsent() throws Exception {
        doTestChunkSize(false, false, CRLF, 10, 0);
    }

    @Test
    public void testChunkSizeTwentyFive() throws Exception {
        doTestChunkSize(true, true, "19" + CRLF + "Hello World!Hello World!!" + CRLF, 40, 25);
    }

    @Test
    public void testChunkSizeEightDigit() throws Exception {
        doTestChunkSize(true, true, "0000000C" + CRLF + "Hello World!" + CRLF, 20, 12);
    }

    @Test
    public void testChunkSizeNineDigit() throws Exception {
        doTestChunkSize(false, false, "00000000C" + CRLF + "Hello World!" + CRLF, 20, 12);
    }

    @Test
    public void testChunkSizeLong() throws Exception {
        doTestChunkSize(true, false, "7fFFffFF" + CRLF + "Hello World!" + CRLF, 10, 10);
    }

    @Test
    public void testChunkSizeIntegerMinValue() throws Exception {
        doTestChunkSize(false, false, "80000000" + CRLF + "Hello World!" + CRLF, 10, 10);
    }

    @Test
    public void testChunkSizeMinusOne() throws Exception {
        doTestChunkSize(false, false, "ffffffff" + CRLF + "Hello World!" + CRLF, 10, 10);
    }

    /**
     * @param expectPass
     *            If the servlet is expected to process the request
     * @param expectReadWholeBody
     *            If the servlet is expected to fully read the body and reliably
     *            deliver a response
     * @param chunks
     *            Text of chunks
     * @param readLimit
     *            Do not read more than this many bytes
     * @param expectReadCount
     *            Expected count of read bytes
     * @throws Exception
     *             Unexpected
     */
    private void doTestChunkSize(boolean expectPass, boolean expectReadWholeBody, String chunks, int readLimit,
            int expectReadCount) throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        BodyReadServlet servlet = new BodyReadServlet(expectPass, readLimit);
        Tomcat.addServlet(ctx, "servlet", servlet);
        ctx.addServletMappingDecoded("/", "servlet");

        tomcat.start();

        // @formatter:off
        String request =
                "POST /echo-params.jsp HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                "Content-Type: text/plain" + CRLF +
                (expectPass ? "Connection: close" + CRLF : "") +
                CRLF +
                chunks +
                "0" + CRLF +
                CRLF;
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        // Need to use the content length here as variations in Connector and
        // JVM+OS behaviour mean that in some circumstances the client may see
        // an IOException rather than the response body when the server closes
        // the connection.
        client.setUseContentLength(true);
        client.setRequest(new String[] { request });

        Exception processException = null;
        client.connect();
        try {
            client.processRequest();
            client.disconnect();
        } catch (Exception e) {
            // Socket was probably closed before client had a chance to read
            // response
            processException = e;
        }

        if (expectPass) {
            if (expectReadWholeBody) {
                Assert.assertNull(processException);
            }
            if (processException == null) {
                Assert.assertTrue(client.getResponseLine(), client.isResponse200());
                Assert.assertEquals(String.valueOf(expectReadCount), client.getResponseBody());
            }
            Assert.assertEquals(expectReadCount, servlet.getCountRead());
        } else {
            if (processException == null) {
                Assert.assertTrue(client.getResponseLine(), client.isResponse500());
            }
            Assert.assertEquals(0, servlet.getCountRead());
            Assert.assertTrue(servlet.getExceptionDuringRead());
        }
    }

    @Test
    public void testTrailerHeaderNameNotTokenThrowException() throws Exception {
        doTestTrailerHeaderNameNotToken(false);
    }

    @Test
    public void testTrailerHeaderNameNotTokenSwallowException() throws Exception {
        doTestTrailerHeaderNameNotToken(true);
    }

    /*
     * Sends a trailer header whose name contains '@' (not an HTTP token) and
     * expects a 400 response whether or not the servlet swallows the resulting
     * IOException.
     */
    private void doTestTrailerHeaderNameNotToken(boolean swallowException) throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        Tomcat.addServlet(ctx, "servlet", new SwallowBodyServlet(swallowException));
        ctx.addServletMappingDecoded("/", "servlet");

        tomcat.start();

        // @formatter:off
        String[] request = new String[] {
                "POST / HTTP/1.1" + CRLF +
                "Host: localhost" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "3" + CRLF +
                "a=0" + CRLF +
                "4" + CRLF +
                "&b=1" + CRLF +
                "0" + CRLF +
                "x@trailer: Test" + CRLF +
                CRLF };
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        client.setRequest(request);

        client.connect();
        client.processRequest();
        // Expected to fail because of invalid trailer header name
        Assert.assertTrue(client.getResponseLine(), client.isResponse400());
    }

    // Reads and discards the request body; optionally swallows any IOException
    // raised during the read rather than propagating it.
    private static class SwallowBodyServlet extends HttpServlet {
        private static final long serialVersionUID = 1L;

        private final boolean swallowException;

        SwallowBodyServlet(boolean swallowException) {
            this.swallowException = swallowException;
        }

        @Override
        protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
            resp.setContentType("text/plain");
            PrintWriter pw = resp.getWriter();

            // Read the body
            InputStream is = req.getInputStream();
            try {
                while (is.read() > -1) {
                }
                pw.write("OK");
            } catch (IOException ioe) {
                if (!swallowException) {
                    throw ioe;
                }
            }
        }
    }

    // Echoes the x-trailer1/x-trailer2 trailer fields before and after reading
    // the body, plus the number of body bytes read in between.
    private static class EchoHeaderServlet extends HttpServlet {
        private static final long serialVersionUID = 1L;

        private boolean exceptionDuringRead = false;
        private final boolean expectPass;

        EchoHeaderServlet(boolean expectPass) {
            this.expectPass = expectPass;
        }

        @Override
        protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
            resp.setContentType("text/plain");
            PrintWriter pw = resp.getWriter();
            // Headers not visible yet, body not processed
            dumpHeader("x-trailer1", req, pw);
            dumpHeader("x-trailer2", req, pw);

            // Read the body - quick and dirty
            InputStream is = req.getInputStream();
            int count = 0;
            try {
                while (is.read() > -1) {
                    count++;
                }
            } catch (IOException ioe) {
                exceptionDuringRead = true;
                if (!expectPass) { // as expected
                    log(ioe.toString());
                    resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                    return;
                }
                throw ioe;
            }

            pw.write(Integer.toString(count));

            // Headers should be visible now
            dumpHeader("x-trailer1", req, pw);
            dumpHeader("x-trailer2", req, pw);
        }

        public boolean getExceptionDuringRead() {
            return exceptionDuringRead;
        }

        // Writes the named trailer field's value, or "null" if absent.
        private void dumpHeader(String headerName, HttpServletRequest req, PrintWriter pw) {
            String value = req.getTrailerFields().get(headerName);
            if (value == null) {
                value = "null";
            }
            pw.write(value);
        }
    }

    // Reads up to readLimit body bytes and reports the count; records whether
    // an IOException occurred during the read.
    private static class BodyReadServlet extends HttpServlet {
        private static final long serialVersionUID = 1L;

        private boolean exceptionDuringRead = false;
        private int countRead = 0;
        private final boolean expectPass;
        private final int readLimit;

        BodyReadServlet(boolean expectPass, int readLimit) {
            this.expectPass = expectPass;
            this.readLimit = readLimit;
        }

        @Override
        protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
            resp.setContentType("text/plain");
            PrintWriter pw = resp.getWriter();

            // Read the body - quick and dirty
            InputStream is = req.getInputStream();
            try {
                while (is.read() > -1 && countRead < readLimit) {
                    countRead++;
                }
            } catch (IOException ioe) {
                exceptionDuringRead = true;
                if (!expectPass) { // as expected
                    log(ioe.toString());
                    resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                    return;
                }
                throw ioe;
            }

            pw.write(Integer.toString(countRead));
        }

        public boolean getExceptionDuringRead() {
            return exceptionDuringRead;
        }

        public int getCountRead() {
            return countRead;
        }
    }

    private static class TrailerClient extends SimpleHttpClient {

        TrailerClient(int port) {
            setPort(port);
        }

        @Override
        public boolean isResponseBodyOK() {
            return getResponseBody().contains("TestTestTest");
        }
    }

    @Test
    public void doTestIncompleteChunkedBody() throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        Tomcat.addServlet(ctx, "servlet", new SwallowBodyServlet(false));
        ctx.addServletMappingDecoded("/", "servlet");

        tomcat.start();

        // @formatter:off
        String[] request = new String[] {
                "POST / HTTP/1.1" + CRLF +
                "Host: localhost" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                CRLF +
                "3" + CRLF };
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        client.setUseContentLength(true);
        client.setRequest(request);

        client.connect();
        try {
            client.processRequest();
        } catch (IOException ioe) {
            // Ignore - Triggered by connection being dropped after error
        }

        // NIO2 may (will?) return null here
        String responseLine = client.getResponseLine();
        if (responseLine == null) {
            // 400 response not read(/written?) before connection was dropped.
        } else {
            Assert.assertTrue(client.getResponseLine(), client.isResponse400());
        }
    }

    @Test
    public void doTestMaxSwallowSizeBelow() throws Exception {
        doTestMaxSwallowSize(1000, true);
    }

    @Test
    public void doTestMaxSwallowSizeAbove() throws Exception {
        doTestMaxSwallowSize(10, false);
    }

    /*
     * Sends an unread chunked body, then pipelines a second request. If the
     * unread body exceeds maxSwallowSize the connection is expected to be
     * dropped after the first response.
     */
    private void doTestMaxSwallowSize(int maxSwallowSize, boolean pass) throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        tomcat.getConnector().setProperty("connectionTimeout", "300000");
        // Reduce limits to facilitate testing
        tomcat.getConnector().setProperty("maxSwallowSize", Integer.toString(maxSwallowSize));

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        Tomcat.addServlet(ctx, "servlet", new TesterServlet(false));
        ctx.addServletMappingDecoded("/", "servlet");

        tomcat.start();

        // @formatter:off
        String[] request = new String[] {
                "GET / HTTP/1.1" + CRLF +
                "Host: localhost" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                CRLF +
                "20" + CRLF +
                "01234567890123456789012345678901" + CRLF +
                "0" + CRLF +
                CRLF };
        // @formatter:on

        TrailerClient client = new TrailerClient(tomcat.getConnector().getLocalPort());
        client.setUseContentLength(true);
        client.setRequest(request);

        client.connect();
        client.sendRequest();
        client.readResponse(true);

        // Response is committed before connection is closed.
        Assert.assertTrue(client.getResponseLine(), client.isResponse200());

        // Repeat request - should fail
        client.resetResponse();
        client.sendRequest();
        try {
            client.readResponse(true);
        } catch (IOException ioe) {
            // Ignore - in case the read fails due to a closed connection
        }

        if (pass) {
            Assert.assertTrue(client.getResponseLine(), client.isResponse200());
        } else {
            // Connection reset
            Assert.assertNull(client.getResponseLine());
        }
    }

    // Blocking read of the body one line at a time, counting lines and the
    // number of >=800ms pauses observed between successive reads.
    private static class BodyReadLineServlet extends HttpServlet {
        private static final long serialVersionUID = 1L;

        @Override
        protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {

            int lineCount = 0;
            int pauseCount = 0;

            // Read the body one line at a time. There should be ~1s between reads.
            try (InputStream is = req.getInputStream();
                    InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8);
                    BufferedReader br = new BufferedReader(isr)) {

                long lastRead = 0;
                while (br.readLine() != null) {
                    long thisRead = System.nanoTime();
                    if (lineCount > 0) {
                        /*
                         * After the first line, look for a pause of at least 800ms between reads.
                         */
                        if ((thisRead - lastRead) > TimeUnit.MILLISECONDS.toNanos(800)) {
                            pauseCount++;
                        }
                    }
                    lastRead = thisRead;
                    lineCount++;
                }
            }

            resp.setContentType("text/plain");
            PrintWriter pw = resp.getWriter();
            pw.write(Integer.toString(lineCount) + "," + Integer.toString(pauseCount));
        }
    }

    // Non-blocking equivalent of BodyReadLineServlet using the Servlet
    // async/ReadListener API; writes the same "<lines>,<pauses>" result.
    private static class NonBlockingReadLineServlet extends HttpServlet {
        private static final long serialVersionUID = 1L;

        // Mutated only from ReadListener callbacks for this request.
        int lineCount = 0;
        int pauseCount = 0;
        long lastRead = 0;

        @Override
        protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {

            final AsyncContext ctx = req.startAsync();
            ServletInputStream is = req.getInputStream();

            is.setReadListener(new ReadListener() {

                @Override
                public void onDataAvailable() throws IOException {
                    byte[] buf = new byte[1024];
                    do {
                        int n = is.read(buf);
                        if (n < 0) {
                            break;
                        } else if (n > 0) {
                            String line = new String(buf, 0, n, StandardCharsets.UTF_8);
                            Assert.assertTrue(line.length() > 0);
                            long thisRead = System.nanoTime();
                            if (lineCount > 0) {
                                /*
                                 * After the first line, look for a pause of at least 800ms between reads.
                                 */
                                if ((thisRead - lastRead) > TimeUnit.MILLISECONDS.toNanos(800)) {
                                    pauseCount++;
                                }
                            }
                            lastRead = thisRead;
                            lineCount++;
                        }
                    } while (is.isReady());
                }

                @Override
                public void onAllDataRead() throws IOException {
                    resp.setContentType("text/plain");
                    PrintWriter pw = resp.getWriter();
                    pw.write(Integer.toString(lineCount) + "," + Integer.toString(pauseCount));
                    ctx.complete();
                }

                @Override
                public void onError(Throwable throwable) {
                    throwable.printStackTrace();
                }
            });
        }
    }

    private static class ReadLineClient extends SimpleHttpClient {

        ReadLineClient(int port) {
            setPort(port);
        }

        @Override
        public boolean isResponseBodyOK() {
            return getResponseBody().equals("5");
        }
    }

    @Test
    public void testChunkedSplitWithReader() throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        BodyReadLineServlet servlet = new BodyReadLineServlet();
        Tomcat.addServlet(ctx, "servlet", servlet);
        ctx.addServletMappingDecoded("/test", "servlet");

        tomcat.getConnector().setProperty("connectionTimeout", "300000");
        tomcat.start();

        // @formatter:off
        String[] request = new String[] {
                "POST /test HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "7" + CRLF +
                "DATA01\n",
                CRLF +
                "7",
                CRLF +
                "DATA02\n" + CRLF,
                "7" + CRLF +
                // Split the CRLF between writes
                "DATA03\n" + SimpleHttpClient.CR,
                SimpleHttpClient.LF +
                "7" + CRLF +
                "DATA04\n",
                CRLF +
                "13" + CRLF,
                "DATA05DATA05DATA05\n" + CRLF +
                "0" + CRLF +
                CRLF };
        // @formatter:on

        ReadLineClient client = new ReadLineClient(tomcat.getConnector().getLocalPort());
        client.setRequest(request);

        client.connect(300000,300000);
        client.processRequest();
        Assert.assertTrue(client.isResponse200());
        /*
         * Output is "<lines read>,<pauses observer>" so there should be 5 lines read with a pause between each.
         */
        Assert.assertEquals("5,4", client.getResponseBody());
    }

    @Test
    public void testChunkedSplitWithNonBlocking() throws Exception {
        // Setup Tomcat instance
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = getProgrammaticRootContext();

        NonBlockingReadLineServlet servlet = new NonBlockingReadLineServlet();
        Wrapper wrapper = Tomcat.addServlet(ctx, "servlet", servlet);
        wrapper.setAsyncSupported(true);
        ctx.addServletMappingDecoded("/test", "servlet");

        tomcat.getConnector().setProperty("connectionTimeout", "300000");
        tomcat.start();

        // @formatter:off
        String[] request = new String[] {
                "POST /test HTTP/1.1" + CRLF +
                "Host: any" + CRLF +
                "Transfer-encoding: chunked" + CRLF +
                SimpleHttpClient.HTTP_HEADER_CONTENT_TYPE_FORM_URL_ENCODING +
                "Connection: close" + CRLF +
                CRLF +
                "7" + CRLF +
                "DATA01\n",
                CRLF +
                "7",
                CRLF +
                "DATA02\n" + CRLF,
                "7" + CRLF +
                // Split the CRLF between writes
                "DATA03\n" + SimpleHttpClient.CR,
                SimpleHttpClient.LF +
                "7" + CRLF +
                "DATA04\n",
                CRLF +
                "13" + CRLF,
                "DATA05DATA05DATA05\n" + CRLF +
                "0" + CRLF +
                CRLF };
        // @formatter:on

        ReadLineClient client = new ReadLineClient(tomcat.getConnector().getLocalPort());
        client.setRequest(request);

        client.connect(300000,300000);
        client.processRequest();
        Assert.assertTrue(client.isResponse200());
        /*
         * Output is "<lines read>,<pauses observer>" so there should be 5 lines read with a pause between each.
         */
        Assert.assertEquals("5,4", client.getResponseBody());
    }
}
googleapis/google-cloud-java
35,264
java-functions/proto-google-cloud-functions-v2alpha/src/main/java/com/google/cloud/functions/v2alpha/GenerateUploadUrlResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/functions/v2alpha/functions.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.functions.v2alpha; /** * * * <pre> * Response of `GenerateSourceUploadUrl` method. * </pre> * * Protobuf type {@code google.cloud.functions.v2alpha.GenerateUploadUrlResponse} */ public final class GenerateUploadUrlResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.functions.v2alpha.GenerateUploadUrlResponse) GenerateUploadUrlResponseOrBuilder { private static final long serialVersionUID = 0L; // Use GenerateUploadUrlResponse.newBuilder() to construct. 
private GenerateUploadUrlResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GenerateUploadUrlResponse() { uploadUrl_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GenerateUploadUrlResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_GenerateUploadUrlResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_GenerateUploadUrlResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse.class, com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse.Builder.class); } private int bitField0_; public static final int UPLOAD_URL_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object uploadUrl_ = ""; /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return The uploadUrl. */ @java.lang.Override public java.lang.String getUploadUrl() { java.lang.Object ref = uploadUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uploadUrl_ = s; return s; } } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. 
* </pre> * * <code>string upload_url = 1;</code> * * @return The bytes for uploadUrl. */ @java.lang.Override public com.google.protobuf.ByteString getUploadUrlBytes() { java.lang.Object ref = uploadUrl_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STORAGE_SOURCE_FIELD_NUMBER = 2; private com.google.cloud.functions.v2alpha.StorageSource storageSource_; /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> * * @return Whether the storageSource field is set. */ @java.lang.Override public boolean hasStorageSource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> * * @return The storageSource. */ @java.lang.Override public com.google.cloud.functions.v2alpha.StorageSource getStorageSource() { return storageSource_ == null ? com.google.cloud.functions.v2alpha.StorageSource.getDefaultInstance() : storageSource_; } /** * * * <pre> * The location of the source code in the upload bucket. 
* * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ @java.lang.Override public com.google.cloud.functions.v2alpha.StorageSourceOrBuilder getStorageSourceOrBuilder() { return storageSource_ == null ? com.google.cloud.functions.v2alpha.StorageSource.getDefaultInstance() : storageSource_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uploadUrl_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uploadUrl_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getStorageSource()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uploadUrl_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uploadUrl_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStorageSource()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse)) { return super.equals(obj); } 
com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse other = (com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse) obj; if (!getUploadUrl().equals(other.getUploadUrl())) return false; if (hasStorageSource() != other.hasStorageSource()) return false; if (hasStorageSource()) { if (!getStorageSource().equals(other.getStorageSource())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + UPLOAD_URL_FIELD_NUMBER; hash = (53 * hash) + getUploadUrl().hashCode(); if (hasStorageSource()) { hash = (37 * hash) + STORAGE_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getStorageSource().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response of `GenerateSourceUploadUrl` method. * </pre> * * Protobuf type {@code google.cloud.functions.v2alpha.GenerateUploadUrlResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.functions.v2alpha.GenerateUploadUrlResponse) com.google.cloud.functions.v2alpha.GenerateUploadUrlResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_GenerateUploadUrlResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_GenerateUploadUrlResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse.class, com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse.Builder.class); } // Construct using com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStorageSourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; uploadUrl_ = ""; storageSource_ = null; if (storageSourceBuilder_ != null) { storageSourceBuilder_.dispose(); storageSourceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_GenerateUploadUrlResponse_descriptor; } @java.lang.Override public com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse getDefaultInstanceForType() { return com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse build() { com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse buildPartial() { com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse result = new com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.uploadUrl_ = uploadUrl_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.storageSource_ = storageSourceBuilder_ == null ? 
storageSource_ : storageSourceBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse) { return mergeFrom((com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse other) { if (other == com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse.getDefaultInstance()) return this; if (!other.getUploadUrl().isEmpty()) { uploadUrl_ = other.uploadUrl_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasStorageSource()) { mergeStorageSource(other.getStorageSource()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { uploadUrl_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getStorageSourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object uploadUrl_ = ""; /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return The uploadUrl. */ public java.lang.String getUploadUrl() { java.lang.Object ref = uploadUrl_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uploadUrl_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return The bytes for uploadUrl. 
*/ public com.google.protobuf.ByteString getUploadUrlBytes() { java.lang.Object ref = uploadUrl_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @param value The uploadUrl to set. * @return This builder for chaining. */ public Builder setUploadUrl(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uploadUrl_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @return This builder for chaining. */ public Builder clearUploadUrl() { uploadUrl_ = getDefaultInstance().getUploadUrl(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The generated Google Cloud Storage signed URL that should be used for a * function source code upload. The uploaded file should be a zip archive * which contains a function. * </pre> * * <code>string upload_url = 1;</code> * * @param value The bytes for uploadUrl to set. * @return This builder for chaining. 
*/ public Builder setUploadUrlBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uploadUrl_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.functions.v2alpha.StorageSource storageSource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.functions.v2alpha.StorageSource, com.google.cloud.functions.v2alpha.StorageSource.Builder, com.google.cloud.functions.v2alpha.StorageSourceOrBuilder> storageSourceBuilder_; /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> * * @return Whether the storageSource field is set. */ public boolean hasStorageSource() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> * * @return The storageSource. */ public com.google.cloud.functions.v2alpha.StorageSource getStorageSource() { if (storageSourceBuilder_ == null) { return storageSource_ == null ? 
com.google.cloud.functions.v2alpha.StorageSource.getDefaultInstance() : storageSource_; } else { return storageSourceBuilder_.getMessage(); } } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ public Builder setStorageSource(com.google.cloud.functions.v2alpha.StorageSource value) { if (storageSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } storageSource_ = value; } else { storageSourceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ public Builder setStorageSource( com.google.cloud.functions.v2alpha.StorageSource.Builder builderForValue) { if (storageSourceBuilder_ == null) { storageSource_ = builderForValue.build(); } else { storageSourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. 
* * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ public Builder mergeStorageSource(com.google.cloud.functions.v2alpha.StorageSource value) { if (storageSourceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && storageSource_ != null && storageSource_ != com.google.cloud.functions.v2alpha.StorageSource.getDefaultInstance()) { getStorageSourceBuilder().mergeFrom(value); } else { storageSource_ = value; } } else { storageSourceBuilder_.mergeFrom(value); } if (storageSource_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ public Builder clearStorageSource() { bitField0_ = (bitField0_ & ~0x00000002); storageSource_ = null; if (storageSourceBuilder_ != null) { storageSourceBuilder_.dispose(); storageSourceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. 
* </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ public com.google.cloud.functions.v2alpha.StorageSource.Builder getStorageSourceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getStorageSourceFieldBuilder().getBuilder(); } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. * </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ public com.google.cloud.functions.v2alpha.StorageSourceOrBuilder getStorageSourceOrBuilder() { if (storageSourceBuilder_ != null) { return storageSourceBuilder_.getMessageOrBuilder(); } else { return storageSource_ == null ? com.google.cloud.functions.v2alpha.StorageSource.getDefaultInstance() : storageSource_; } } /** * * * <pre> * The location of the source code in the upload bucket. * * Once the archive is uploaded using the `upload_url` use this field to * set the `function.build_config.source.storage_source` * during CreateFunction and UpdateFunction. * * Generation defaults to 0, as Cloud Storage provides a new generation only * upon uploading a new object or version of an object. 
* </pre> * * <code>.google.cloud.functions.v2alpha.StorageSource storage_source = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.functions.v2alpha.StorageSource, com.google.cloud.functions.v2alpha.StorageSource.Builder, com.google.cloud.functions.v2alpha.StorageSourceOrBuilder> getStorageSourceFieldBuilder() { if (storageSourceBuilder_ == null) { storageSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.functions.v2alpha.StorageSource, com.google.cloud.functions.v2alpha.StorageSource.Builder, com.google.cloud.functions.v2alpha.StorageSourceOrBuilder>( getStorageSource(), getParentForChildren(), isClean()); storageSource_ = null; } return storageSourceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.functions.v2alpha.GenerateUploadUrlResponse) } // @@protoc_insertion_point(class_scope:google.cloud.functions.v2alpha.GenerateUploadUrlResponse) private static final com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse(); } public static com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GenerateUploadUrlResponse> PARSER = new com.google.protobuf.AbstractParser<GenerateUploadUrlResponse>() { @java.lang.Override public GenerateUploadUrlResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GenerateUploadUrlResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GenerateUploadUrlResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.functions.v2alpha.GenerateUploadUrlResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hop
35,179
plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforce/SalesforceConnection.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hop.pipeline.transforms.salesforce;

import com.sforce.soap.partner.DeleteResult;
import com.sforce.soap.partner.DeletedRecord;
import com.sforce.soap.partner.DescribeGlobalResult;
import com.sforce.soap.partner.DescribeGlobalSObjectResult;
import com.sforce.soap.partner.DescribeSObjectResult;
import com.sforce.soap.partner.Field;
import com.sforce.soap.partner.FieldType;
import com.sforce.soap.partner.GetDeletedResult;
import com.sforce.soap.partner.GetUpdatedResult;
import com.sforce.soap.partner.GetUserInfoResult;
import com.sforce.soap.partner.LoginResult;
import com.sforce.soap.partner.PartnerConnection;
import com.sforce.soap.partner.QueryResult;
import com.sforce.soap.partner.SaveResult;
import com.sforce.soap.partner.UpsertResult;
import com.sforce.soap.partner.fault.ExceptionCode;
import com.sforce.soap.partner.fault.LoginFault;
import com.sforce.soap.partner.sobject.SObject;
import com.sforce.ws.ConnectionException;
import com.sforce.ws.ConnectorConfig;
import com.sforce.ws.bind.XmlObject;
import com.sforce.ws.wsdl.Constants;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import javax.xml.namespace.QName;
import javax.xml.soap.SOAPException;
import org.apache.commons.lang.StringUtils;
import org.apache.hop.core.Const;
import org.apache.hop.core.encryption.Encr;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.logging.HopLogStore;
import org.apache.hop.core.logging.ILogChannel;
import org.apache.hop.core.util.Utils;
import org.apache.hop.i18n.BaseMessages;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;

/**
 * Wrapper around the Salesforce SOAP Partner API ({@link PartnerConnection}).
 *
 * <p>A connection is configured with URL / credentials, then {@link #connect()} performs the login
 * and {@link #query(boolean)} / {@link #queryMore()} page through results. The class also offers
 * DML helpers ({@link #insert}, {@link #update}, {@link #upsert}, {@link #delete}) and metadata
 * lookups ({@link #getAllAvailableObjects}, {@link #getObjectFields}).
 *
 * <p>This class is NOT thread-safe: it keeps per-query mutable state ({@code qr}, {@code sObjects},
 * counters) on the instance.
 */
public class SalesforceConnection {
  private static final FieldType ID_FIELD_TYPE = FieldType.id;
  private static final FieldType REFERENCE_FIELD_TYPE = FieldType.reference;
  private static final Class<?> PKG = SalesforceConnection.class;

  private String url;
  private String username;
  private String password;
  private String module;
  private int timeout;
  private PartnerConnection binding;
  private LoginResult loginResult;
  private GetUserInfoResult userInfo;
  private String sql;
  private Date serverTimestamp;
  private QueryResult qr;
  private GregorianCalendar startDate;
  private GregorianCalendar endDate;
  private SObject[] sObjects;
  private int recordsFilter;
  private String fieldsList;
  private int queryResultSize;
  private int recordsCount;
  private boolean useCompression;
  private boolean rollbackAllChangesOnError;
  private boolean queryAll;
  // Maps deleted record id -> deletion timestamp when RECORDS_FILTER_DELETED is active.
  private HashMap<String, Date> getDeletedList;
  private ILogChannel log;

  /**
   * Construct a new Salesforce Connection.
   *
   * @param logInterface log channel to use; when {@code null} a fresh channel is created
   * @param url target Salesforce endpoint URL (must not be empty)
   * @param username Salesforce user name (must not be empty)
   * @param password Salesforce password (possibly Hop-encrypted)
   * @throws HopException if the URL or user name is missing
   */
  public SalesforceConnection(
      ILogChannel logInterface, String url, String username, String password) throws HopException {
    if (logInterface == null) {
      this.log = HopLogStore.getLogChannelFactory().create(this);
    } else {
      this.log = logInterface;
    }
    this.url = url;
    setUsername(username);
    setPassword(password);
    setTimeOut(0);

    this.binding = null;
    this.loginResult = null;
    this.userInfo = null;
    this.sql = null;
    this.serverTimestamp = null;
    this.qr = null;
    this.startDate = null;
    this.endDate = null;
    this.sObjects = null;
    this.recordsFilter = SalesforceConnectionUtils.RECORDS_FILTER_ALL;
    this.fieldsList = null;
    this.queryResultSize = 0;
    this.recordsCount = 0;
    setUsingCompression(false);
    setRollbackAllChangesOnError(false);

    // check target URL
    if (Utils.isEmpty(getURL())) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.TargetURLMissing.Error"));
    }

    // check username
    if (Utils.isEmpty(getUsername())) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.UsernameMissing.Error"));
    }

    if (log.isDetailed()) {
      // BUG FIX: previously logged through "logInterface", which NPEs when the caller passed
      // null (the case explicitly handled above). Always log through this.log.
      log.logDetailed(BaseMessages.getString(PKG, "SalesforceConnection.Log.NewConnection"));
    }
  }

  public boolean isRollbackAllChangesOnError() {
    return this.rollbackAllChangesOnError;
  }

  /** When true, the AllOrNone SOAP header is sent so a failing batch rolls back entirely. */
  public void setRollbackAllChangesOnError(boolean value) {
    this.rollbackAllChangesOnError = value;
  }

  public boolean isQueryAll() {
    return this.queryAll;
  }

  /** When true, queryAll() is used so soft-deleted/archived records are included. */
  public void setQueryAll(boolean value) {
    this.queryAll = value;
  }

  /**
   * Configure the date window used by the UPDATED / DELETED record filters.
   *
   * @param recordsFilter one of the SalesforceConnectionUtils.RECORDS_FILTER_* constants
   * @param startDate window start (inclusive), must precede endDate
   * @param endDate window end
   * @throws HopException if either date is null, the range is inverted, or it spans more than 30
   *     days (the Salesforce replication API limit)
   */
  public void setCalendar(int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate)
      throws HopException {
    this.startDate = startDate;
    this.endDate = endDate;
    this.recordsFilter = recordsFilter;
    if (this.startDate == null || this.endDate == null) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Error.EmptyStartDateOrEndDate"));
    }
    if (this.startDate.getTime().compareTo(this.endDate.getTime()) >= 0) {
      throw new HopException(BaseMessages.getString(PKG, "SalesforceConnection.Error.WrongDates"));
    }
    // Calculate difference in days
    long diffDays =
        (this.endDate.getTime().getTime() - this.startDate.getTime().getTime())
            / (24 * 60 * 60 * 1000);
    if (diffDays > 30) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Error.StartDateTooOlder"));
    }
  }

  public void setSQL(String sql) {
    this.sql = sql;
  }

  /** Comma-separated field list used by retrieve() when replaying updated records. */
  public void setFieldsList(String fieldsList) {
    this.fieldsList = fieldsList;
  }

  public void setModule(String module) {
    this.module = module;
  }

  public String getURL() {
    return this.url;
  }

  public String getSQL() {
    return this.sql;
  }

  public Date getServerTimestamp() {
    return this.serverTimestamp;
  }

  public String getModule() {
    return this.module;
  }

  public QueryResult getQueryResult() {
    return this.qr;
  }

  /**
   * Lazily create (or return the cached) PartnerConnection for the given config.
   *
   * @throws ConnectionException if the WSC stub cannot be created
   */
  public PartnerConnection createBinding(ConnectorConfig config) throws ConnectionException {
    if (this.binding == null) {
      this.binding = new PartnerConnection(config);
    }
    return this.binding;
  }

  public PartnerConnection getBinding() {
    return this.binding;
  }

  /** Connection/read timeout in milliseconds; 0 means "use the WSC defaults". */
  public void setTimeOut(int timeout) {
    this.timeout = timeout;
  }

  public int getTimeOut() {
    return this.timeout;
  }

  public boolean isUsingCompression() {
    return this.useCompression;
  }

  public void setUsingCompression(boolean useCompression) {
    this.useCompression = useCompression;
  }

  public String getUsername() {
    return this.username;
  }

  public void setUsername(String value) {
    this.username = value;
  }

  public String getPassword() {
    return this.password;
  }

  public void setPassword(String value) {
    this.password = value;
  }

  /**
   * Log in to Salesforce: build the {@link ConnectorConfig} (including JVM http proxy settings and
   * timeouts), perform the SOAP login, install the returned session header and service endpoint,
   * and cache user info plus the server timestamp.
   *
   * @throws HopException if login fails; credential-type {@link LoginFault}s are translated to a
   *     dedicated "invalid username or password" message
   */
  public void connect() throws HopException {

    ConnectorConfig config = new ConnectorConfig();
    config.setAuthEndpoint(getURL());
    config.setServiceEndpoint(getURL());
    config.setUsername(getUsername());
    config.setPassword(getPassword());
    config.setCompression(isUsingCompression());
    config.setManualLogin(true);

    // Honor the standard JVM proxy properties, decrypting the proxy password if needed.
    String proxyUrl = System.getProperty("http.proxyHost", null);
    if (StringUtils.isNotEmpty(proxyUrl)) {
      int proxyPort = Integer.parseInt(System.getProperty("http.proxyPort", "80"));
      String proxyUser = System.getProperty("http.proxyUser", null);
      String proxyPassword =
          Encr.decryptPasswordOptionallyEncrypted(System.getProperty("http.proxyPassword", null));
      config.setProxy(proxyUrl, proxyPort);
      config.setProxyUsername(proxyUser);
      config.setProxyPassword(proxyPassword);
    }

    // Set timeout
    if (getTimeOut() > 0) {
      if (log.isDebug()) {
        log.logDebug(
            BaseMessages.getString(
                PKG, "SalesforceConnection.Log.SettingTimeout", "" + this.timeout));
      }
      config.setConnectionTimeout(getTimeOut());
      config.setReadTimeout(getTimeOut());
    }

    try {
      PartnerConnection pConnection = createBinding(config);

      if (log.isDetailed()) {
        log.logDetailed(
            BaseMessages.getString(
                PKG, "SalesforceConnection.Log.LoginURL", config.getAuthEndpoint()));
      }

      if (isRollbackAllChangesOnError()) {
        // Set the SOAP header to rollback all changes
        // unless all records are processed successfully.
        pConnection.setAllOrNoneHeader(true);
      }

      // Attempt the login giving the user feedback
      if (log.isDetailed()) {
        log.logDetailed(BaseMessages.getString(PKG, "SalesforceConnection.Log.LoginNow"));
        log.logDetailed("----------------------------------------->");
        log.logDetailed(BaseMessages.getString(PKG, "SalesforceConnection.Log.LoginURL", getURL()));
        log.logDetailed(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.LoginUsername", getUsername()));
        if (getModule() != null) {
          log.logDetailed(
              BaseMessages.getString(PKG, "SalesforceConnection.Log.LoginModule", getModule()));
        }
        log.logDetailed("<-----------------------------------------");
      }

      // Login
      this.loginResult =
          pConnection.login(
              config.getUsername(), Encr.decryptPasswordOptionallyEncrypted(config.getPassword()));

      if (log.isDebug()) {
        log.logDebug(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.SessionId")
                + " : "
                + this.loginResult.getSessionId());
        log.logDebug(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.NewServerURL")
                + " : "
                + this.loginResult.getServerUrl());
      }

      // Create a new session header object and set the session id to that
      // returned by the login
      pConnection.setSessionHeader(loginResult.getSessionId());
      config.setServiceEndpoint(loginResult.getServerUrl());

      // Return the user Infos
      this.userInfo = pConnection.getUserInfo();
      if (log.isDebug()) {
        log.logDebug(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.UserInfos")
                + " : "
                + this.userInfo.getUserFullName());
        log.logDebug("----------------------------------------->");
        log.logDebug(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.UserName")
                + " : "
                + this.userInfo.getUserFullName());
        log.logDebug(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.UserEmail")
                + " : "
                + this.userInfo.getUserEmail());
        log.logDebug(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.UserLanguage")
                + " : "
                + this.userInfo.getUserLanguage());
        log.logDebug(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.UserOrganization")
                + " : "
                + this.userInfo.getOrganizationName());
        log.logDebug("<-----------------------------------------");
      }

      this.serverTimestamp = pConnection.getServerTimestamp().getTimestamp().getTime();
      if (log.isDebug()) {
        // BUG FIX: the formatted message was previously built and then discarded; it is now
        // actually written to the debug log.
        log.logDebug(
            BaseMessages.getString(
                PKG, "SalesforceConnection.Log.ServerTimestamp", getServerTimestamp()));
      }
      if (log.isDetailed()) {
        log.logDetailed(BaseMessages.getString(PKG, "SalesforceConnection.Log.Connected"));
      }

    } catch (LoginFault ex) {
      // The LoginFault derives from AxisFault
      ExceptionCode exCode = ex.getExceptionCode();
      if (exCode == ExceptionCode.FUNCTIONALITY_NOT_ENABLED
          || exCode == ExceptionCode.INVALID_CLIENT
          || exCode == ExceptionCode.INVALID_LOGIN
          || exCode == ExceptionCode.LOGIN_DURING_RESTRICTED_DOMAIN
          || exCode == ExceptionCode.LOGIN_DURING_RESTRICTED_TIME
          || exCode == ExceptionCode.ORG_LOCKED
          || exCode == ExceptionCode.PASSWORD_LOCKOUT
          || exCode == ExceptionCode.SERVER_UNAVAILABLE
          || exCode == ExceptionCode.TRIAL_EXPIRED
          || exCode == ExceptionCode.UNSUPPORTED_CLIENT) {
        throw new HopException(
            BaseMessages.getString(PKG, "SalesforceConnection.Error.InvalidUsernameOrPassword"));
      }
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Error.Connection"), ex);
    } catch (Exception e) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Error.Connection"), e);
    }
  }

  /**
   * Execute the configured query, honoring the records filter:
   *
   * <ul>
   *   <li>UPDATED: getUpdated() ids replayed through retrieve(), batched at the 2000-id API limit
   *   <li>DELETED: getDeleted() ids cached in {@code getDeletedList}, then a queryAll()
   *   <li>default: query() or queryAll() depending on {@link #isQueryAll()}
   * </ul>
   *
   * @param specifyQuery when false the target module is first described and validated as queryable
   *     (and replicateable for the UPDATED/DELETED filters)
   * @throws HopException if no binding exists, validation fails, or the API call errors
   */
  public void query(boolean specifyQuery) throws HopException {

    if (getBinding() == null) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Exception.CanNotGetBiding"));
    }

    try {
      if (!specifyQuery) {
        // check if we can query this Object
        DescribeSObjectResult describeSObjectResult = getBinding().describeSObject(getModule());
        if (describeSObjectResult == null) {
          throw new HopException(
              BaseMessages.getString(PKG, "SalesforceConnection.ErrorGettingObject"));
        }
        if (!describeSObjectResult.isQueryable()) {
          throw new HopException(
              BaseMessages.getString(PKG, "SalesforceConnection.ObjectNotQueryable", module));
        }
        if (this.recordsFilter == SalesforceConnectionUtils.RECORDS_FILTER_UPDATED
            || this.recordsFilter == SalesforceConnectionUtils.RECORDS_FILTER_DELETED) {
          // The object must be replicateable
          if (!describeSObjectResult.isReplicateable()) {
            throw new HopException(
                BaseMessages.getString(
                    PKG, "SalesforceConnection.Error.ObjectNotReplicable", getModule()));
          }
        }
      }

      if (getSQL() != null && log.isDetailed()) {
        log.logDetailed(
            BaseMessages.getString(PKG, "SalesforceConnection.Log.SQLString") + " : " + getSQL());
      }

      switch (this.recordsFilter) {
        case SalesforceConnectionUtils.RECORDS_FILTER_UPDATED:
          // Updated records ...
          GetUpdatedResult updatedRecords =
              getBinding().getUpdated(getModule(), this.startDate, this.endDate);

          if (updatedRecords.getIds() != null) {
            int nr = updatedRecords.getIds().length;
            if (nr > 0) {
              String[] ids = updatedRecords.getIds();
              // We can pass a maximum of 2000 object IDs per retrieve() call
              if (nr > SalesforceConnectionUtils.MAX_UPDATED_OBJECTS_IDS) {
                this.sObjects = new SObject[nr];
                List<String> batch = new ArrayList<>();
                int desPos = 0;
                for (int i = 0; i < nr; i++) {
                  batch.add(ids[i]);
                  // BUG FIX: flush when the batch is FULL or on the last id. The previous
                  // "i % MAX == 0" flushed a wasteful single-record first batch at i == 0.
                  if (batch.size() == SalesforceConnectionUtils.MAX_UPDATED_OBJECTS_IDS
                      || i == nr - 1) {
                    SObject[] s =
                        getBinding()
                            .retrieve(
                                this.fieldsList, getModule(), batch.toArray(new String[0]));
                    System.arraycopy(s, 0, this.sObjects, desPos, s.length);
                    desPos += s.length;
                    batch = new ArrayList<>();
                  }
                }
              } else {
                this.sObjects = getBinding().retrieve(this.fieldsList, getModule(), ids);
              }
              if (this.sObjects != null) {
                this.queryResultSize = this.sObjects.length;
              }
            }
          }
          break;
        case SalesforceConnectionUtils.RECORDS_FILTER_DELETED:
          // Deleted records ...
          GetDeletedResult deletedRecordsResult =
              getBinding().getDeleted(getModule(), this.startDate, this.endDate);
          DeletedRecord[] deletedRecords = deletedRecordsResult.getDeletedRecords();

          if (log.isDebug()) {
            log.logDebug(
                toString(),
                BaseMessages.getString(
                    PKG,
                    "SalesforceConnection.DeletedRecordsFound",
                    String.valueOf(deletedRecords == null ? 0 : deletedRecords.length)));
          }

          if (deletedRecords != null && deletedRecords.length > 0) {
            getDeletedList = new HashMap<>();
            for (DeletedRecord dr : deletedRecords) {
              getDeletedList.put(dr.getId(), dr.getDeletedDate().getTime());
            }
            this.qr = getBinding().queryAll(getSQL());
            this.sObjects = getQueryResult().getRecords();
            if (this.sObjects != null) {
              this.queryResultSize = this.sObjects.length;
            }
          }
          break;
        default:
          // return query result
          this.qr = isQueryAll() ? getBinding().queryAll(getSQL()) : getBinding().query(getSQL());
          this.sObjects = getQueryResult().getRecords();
          this.queryResultSize = getQueryResult().getSize();
          break;
      }
      if (this.sObjects != null) {
        this.recordsCount = this.sObjects.length;
      }
    } catch (Exception e) {
      log.logError(Const.getStackTracker(e));
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Exception.Query"), e);
    }
  }

  /**
   * Release all per-connection state. Safe to call even when no query was ever executed.
   *
   * @throws HopException if cleanup fails unexpectedly
   */
  public void close() throws HopException {
    try {
      // BUG FIX: guard against a null QueryResult; previously close() NPE'd when called on a
      // connection that never ran a query.
      if (this.qr != null) {
        if (!this.qr.isDone()) {
          this.qr.setDone(true);
        }
        this.qr = null;
      }
      this.sObjects = null;
      this.binding = null;
      this.loginResult = null;
      this.userInfo = null;
      if (this.getDeletedList != null) {
        getDeletedList.clear();
        getDeletedList = null;
      }
      if (log.isDetailed()) {
        log.logDetailed(BaseMessages.getString(PKG, "SalesforceConnection.Log.ConnectionClosed"));
      }
    } catch (Exception e) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Error.ClosingConnection"), e);
    }
  }

  public int getQueryResultSize() {
    return this.queryResultSize;
  }

  public int getRecordsCount() {
    return this.recordsCount;
  }

  /**
   * Fetch the record at the given index from the current result set.
   *
   * <p>For the DELETED filter, records not present in the deleted-id map are skipped forward until
   * a deleted record (or the end of the buffer) is reached; the returned value then carries the
   * adjusted index and deletion date.
   *
   * @return the wrapped record, or {@code null} when the slot is empty
   */
  public SalesforceRecordValue getRecord(int recordIndex) {
    int index = recordIndex;

    SObject con = this.sObjects[index];
    SalesforceRecordValue retval = new SalesforceRecordValue(index);
    if (con == null) {
      return null;
    }
    if (this.recordsFilter == SalesforceConnectionUtils.RECORDS_FILTER_DELETED) {
      // Special case from deleted records
      // We need to compare each record with the deleted ids
      // in getDeletedList
      if (getDeletedList.containsKey(con.getId())) {
        // this record was deleted in the specified range datetime
        // We will return it
        retval.setRecordValue(con);
        retval.setDeletionDate(getDeletedList.get(con.getId()));
      } else if (index < getRecordsCount() - 1) {
        // this record was not deleted in the range datetime
        // let's move forward and see if we find records that might interest us
        while (con != null
            && index < getRecordsCount() - 1
            && !getDeletedList.containsKey(con.getId())) {
          // still not a record for us !!!
          // let's continue ...
          index++;
          con = this.sObjects[index];
        }
        // if we are here, it means that
        // we found a record to take
        // or we have fetched all available records
        retval.setRecordIndexChanges(true);
        retval.setRecordIndex(index);
        // NOTE(review): indexing the record's children by the record index looks suspicious
        // (children count is unrelated to the result-set position) — behavior kept as-is,
        // worth confirming against upstream.
        if (con != null
            && getChildren(con)[index] != null
            && getDeletedList.containsKey(con.getId())) {
          retval.setRecordValue(con);
          retval.setDeletionDate(getDeletedList.get(con.getId()));
        }
      }
      retval.setAllRecordsProcessed(index >= getRecordsCount() - 1);
    } else {
      // Case for retrieving record also for updated records
      retval.setRecordValue(con);
    }

    return retval;
  }

  /**
   * Resolve a (possibly dotted) field name against a record and return its value as a String.
   * Nested {@link QueryResult} values (sub-selects) are serialized to a JSON array string.
   *
   * @return the field value, or {@code null} when the record/field is absent
   * @throws HopException if JSON serialization of a nested result fails
   */
  public String getRecordValue(SObject con, String fieldname) throws HopException {
    String[] fieldHierarchy = fieldname.split("\\.");
    if (con == null) {
      return null;
    } else {
      XmlObject element = getMessageElementForHierarchy(con, fieldHierarchy);
      if (element != null) {
        Object object = element.getValue();
        if (object != null) {
          if (object instanceof QueryResult queryResult) {
            return buildJsonQueryResult(queryResult);
          }
          return String.valueOf(object);
        } else {
          // Value is null: preserve original behavior of returning the (null) value.
          return (String) element.getValue();
        }
      }
    }
    return null;
  }

  /**
   * Drill down the SObject hierarchy based on the given field hierarchy until either null or the
   * correct MessageElement is found
   */
  private XmlObject getMessageElementForHierarchy(SObject con, String[] fieldHierarchy) {
    final int lastIndex = fieldHierarchy.length - 1;
    SObject currentSObject = con;
    for (int index = 0; index <= lastIndex; index++) {
      for (XmlObject element : getChildren(currentSObject)) {
        if (element.getName().getLocalPart().equals(fieldHierarchy[index])) {
          if (index == lastIndex) {
            return element;
          } else {
            if (element instanceof SObject sObject) {
              // Found the next level, keep going
              currentSObject = sObject;
            }
            break;
          }
        }
      }
    }
    return null;
  }

  /** Serialize all records of a nested QueryResult into a JSON array string. */
  private String buildJsonQueryResult(QueryResult queryResult) throws HopException {
    JSONArray list = new JSONArray();
    for (SObject sobject : queryResult.getRecords()) {
      list.add(buildJSONSObject(sobject));
    }
    StringWriter sw = new StringWriter();
    try {
      list.writeJSONString(sw);
    } catch (IOException e) {
      throw new HopException(e);
    }
    return sw.toString();
  }

  /** Recursively convert an SObject tree into a json-simple JSONObject. */
  private JSONObject buildJSONSObject(SObject sobject) {
    JSONObject jsonObject = new JSONObject();
    for (XmlObject element : getChildren(sobject)) {
      Object object = element.getValue();
      // instanceof is null-safe, so the previous "object != null &&" check was redundant.
      if (object instanceof SObject sObject) {
        jsonObject.put(element.getName(), buildJSONSObject(sObject));
      } else {
        // NOTE(review): the QName (not its local part) is used as the JSON key — kept as-is.
        jsonObject.put(element.getName(), element.getValue());
      }
    }
    return jsonObject;
  }

  // Get SOQL meta data (not a Good way but i don't see any other way !)
  // TODO : Go back to this one
  // I am sure there is an easy way to return meta for a SOQL result
  public XmlObject[] getElements() throws Exception {
    XmlObject[] result = null;
    // Query first
    this.qr = getBinding().query(getSQL());
    // and then return records
    if (this.qr.getSize() > 0) {
      SObject con = getQueryResult().getRecords()[0];
      if (con != null) {
        result = getChildren(con);
      }
    }
    return result;
  }

  /**
   * Fetch the next page of the current query.
   *
   * @return true when a new page was loaded, false when the query was already done
   * @throws HopException on API errors
   */
  public boolean queryMore() throws HopException {
    try {
      // check the done attribute on the QueryResult and call QueryMore
      // with the QueryLocator if there are more records to be retrieved
      if (!getQueryResult().isDone()) {
        this.qr = getBinding().queryMore(getQueryResult().getQueryLocator());
        this.sObjects = getQueryResult().getRecords();
        if (this.sObjects != null) {
          this.recordsCount = this.sObjects.length;
        }
        this.queryResultSize = getQueryResult().getSize();
        return true;
      } else {
        // Query is done .. we finished !
        return false;
      }
    } catch (Exception e) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Error.QueringMore"), e);
    }
  }

  /**
   * List the names of all SObjects visible to the logged-in user.
   *
   * @param onlyQueryableObjects when true, non-queryable objects are filtered out
   * @throws HopException on API errors
   */
  public String[] getAllAvailableObjects(boolean onlyQueryableObjects) throws HopException {
    try {
      // Get all objects in one describeGlobal() call.
      DescribeGlobalResult dgr = getBinding().describeGlobal();
      List<String> objects = new ArrayList<>();
      for (DescribeGlobalSObjectResult o : dgr.getSobjects()) {
        if (!onlyQueryableObjects || o.isQueryable()) {
          objects.add(o.getName());
        }
      }
      return objects.toArray(new String[0]);
    } catch (Exception e) {
      throw new HopException(
          BaseMessages.getString(PKG, "SalesforceConnection.Error.GettingModules"), e);
    }
  }

  /**
   * Describe an object and return its fields.
   *
   * @return the field metadata, or {@code null} when the describe call returns nothing
   * @throws HopException if the object is not queryable or the API call fails
   */
  public Field[] getObjectFields(String objectName) throws HopException {
    try {
      DescribeSObjectResult describeSObjectResult = getBinding().describeSObject(objectName);
      if (describeSObjectResult == null) {
        return null;
      }
      if (!describeSObjectResult.isQueryable()) {
        throw new HopException(
            BaseMessages.getString(PKG, "SalesforceConnection.ObjectNotQueryable", this.module));
      } else {
        // we can query this object
        return describeSObjectResult.getFields();
      }
    } catch (Exception e) {
      throw new HopException(
          BaseMessages.getString(
              PKG, "SalesforceConnection.Error.GettingModuleFields", this.module),
          e);
    }
  }

  /**
   * Returns only updatable object fields and ID field if <b>excludeNonUpdatableFields</b> is true,
   * otherwise all object field
   *
   * @param objectName the name of Saleforce object
   * @param excludeNonUpdatableFields the flag that indicates if non-updatable fields should be
   *     excluded or not
   * @return the list of object fields depending on filter or not non-updatable fields.
   * @throws HopException if any exception occurs
   */
  public Field[] getObjectFields(String objectName, boolean excludeNonUpdatableFields)
      throws HopException {
    Field[] fieldList = getObjectFields(objectName);
    if (excludeNonUpdatableFields) {
      ArrayList<Field> finalFieldList = new ArrayList<>();
      for (Field f : fieldList) {
        // Leave out fields that can't be updated but keep the ID field
        if (isIdField(f) || !f.isCalculated() && f.isUpdateable()) {
          finalFieldList.add(f);
        }
      }
      fieldList = finalFieldList.toArray(new Field[0]);
    }
    return fieldList;
  }

  private boolean isIdField(Field field) {
    // Simplified from a redundant "? true : false" ternary.
    return field.getType() == ID_FIELD_TYPE;
  }

  private boolean isReferenceField(Field field) {
    // Simplified from a redundant "? true : false" ternary.
    return field.getType() == REFERENCE_FIELD_TYPE;
  }

  /**
   * Method returns specified object's fields' names, use #getObjectFields to get fields itself
   *
   * @param objectName object name
   * @return fields' names
   * @throws HopException in case of error
   * @see #getObjectFields(String)
   */
  public String[] getFields(String objectName) throws HopException {
    return getFields(getObjectFields(objectName));
  }

  /**
   * Method returns specified object's fields' names, use #getObjectFields to get fields itself
   *
   * @param objectName object name
   * @param excludeNonUpdatableFields the flag that indicates if non-updatable fields should be
   *     excluded or not
   * @return fields' names
   * @throws HopException in case of error
   */
  public String[] getFields(String objectName, boolean excludeNonUpdatableFields)
      throws HopException {
    return getFields(
        getObjectFields(objectName, excludeNonUpdatableFields), excludeNonUpdatableFields);
  }

  /**
   * Method returns names of the fields specified.
   *
   * @param fields fields
   * @return fields' names, or {@code null} when {@code fields} is null
   * @throws HopException in case of error
   * @see #getObjectFields(String)
   */
  public String[] getFields(Field[] fields) throws HopException {
    if (fields != null) {
      int nrFields = fields.length;
      String[] fieldsMapp = new String[nrFields];
      for (int i = 0; i < nrFields; i++) {
        fieldsMapp[i] = fields[i].getName();
      }
      return fieldsMapp;
    }
    return null;
  }

  /**
   * Method returns names of the fields specified.<br>
   * For the type='reference' it also returns name in the <code>
   * format: objectReferenceTo:externalIdField/lookupField</code>
   *
   * @param fields fields
   * @param excludeNonUpdatableFields the flag that indicates if non-updatable fields should be
   *     excluded or not
   * @return fields' names, or {@code null} when {@code fields} is null
   * @throws HopException in case of error
   */
  public String[] getFields(Field[] fields, boolean excludeNonUpdatableFields)
      throws HopException {
    if (fields != null) {
      ArrayList<String> fieldsList = new ArrayList<>(fields.length);
      for (Field field : fields) {
        // Add the name of the field - always
        fieldsList.add(field.getName());
        // Get the referenced to the field object and for this object get all its field to find
        // possible idLookup fields
        if (isReferenceField(field)) {
          String referenceTo = field.getReferenceTo()[0];
          Field[] referenceObjectFields =
              this.getObjectFields(referenceTo, excludeNonUpdatableFields);
          for (Field f : referenceObjectFields) {
            if (f.isIdLookup() && !isIdField(f)) {
              fieldsList.add(
                  String.format("%s:%s/%s", referenceTo, f.getName(), field.getRelationshipName()));
            }
          }
        }
      }
      return fieldsList.toArray(new String[0]);
    }
    return null;
  }

  /** Upsert the buffered records keyed on the given external-id field. */
  public UpsertResult[] upsert(String upsertField, SObject[] sfBuffer) throws HopException {
    try {
      return getBinding().upsert(upsertField, sfBuffer);
    } catch (Exception e) {
      throw new HopException(BaseMessages.getString(PKG, "SalesforceConnection.ErrorUpsert", e));
    }
  }

  /** Insert the buffered records, silently dropping null slots first. */
  public SaveResult[] insert(SObject[] sfBuffer) throws HopException {
    try {
      List<SObject> normalizedSfBuffer = new ArrayList<>();
      for (SObject part : sfBuffer) {
        if (part != null) {
          normalizedSfBuffer.add(part);
        }
      }
      return getBinding().create(normalizedSfBuffer.toArray(new SObject[0]));
    } catch (Exception e) {
      throw new HopException(BaseMessages.getString(PKG, "SalesforceConnection.ErrorInsert", e));
    }
  }

  /** Update the buffered records. */
  public SaveResult[] update(SObject[] sfBuffer) throws HopException {
    try {
      return getBinding().update(sfBuffer);
    } catch (Exception e) {
      throw new HopException(BaseMessages.getString(PKG, "SalesforceConnection.ErrorUpdate", e));
    }
  }

  /** Delete the records with the given ids. */
  public DeleteResult[] delete(String[] id) throws HopException {
    try {
      return getBinding().delete(id);
    } catch (Exception e) {
      throw new HopException(BaseMessages.getString(PKG, "SalesforceConnection.ErrorDelete", e));
    }
  }

  /**
   * Build a message element for a field value.
   *
   * <p>When {@code useExternalKey} is true, {@code name} must follow the structure {@code
   * object:externalId/lookupField} where {@code object} is the referenced object type, {@code
   * externalId} the field resolving the value, and {@code lookupField} the field on the current
   * object to update (the "__r" version). When the "/lookupField" part is absent, the external-id
   * field name doubles as the lookup field.
   *
   * @throws Exception if the external-key name does not contain an object type
   */
  public static XmlObject createMessageElement(String name, Object value, boolean useExternalKey)
      throws Exception {
    XmlObject me = null;

    if (useExternalKey) {
      // We use an external key
      // the structure should be like this :
      // object:externalId/lookupField
      int indexOfType = name.indexOf(":");
      if (indexOfType > 0) {
        String type = name.substring(0, indexOfType);
        String extIdName = null;
        String lookupField = null;

        String rest = name.substring(indexOfType + 1);
        int indexOfExtId = rest.indexOf("/");
        if (indexOfExtId > 0) {
          extIdName = rest.substring(0, indexOfExtId);
          lookupField = rest.substring(indexOfExtId + 1);
        } else {
          extIdName = rest;
          lookupField = extIdName;
        }
        me = createForeignKeyElement(type, lookupField, extIdName, value);
      } else {
        throw new HopException(
            BaseMessages.getString(PKG, "SalesforceConnection.UnableToFindObjectType"));
      }
    } else {
      me = fromTemplateElement(name, value, true);
    }

    return me;
  }

  /** Build a foreign-key lookup element referencing {@code type} via an external-id field. */
  private static XmlObject createForeignKeyElement(
      String type, String lookupField, String extIdName, Object extIdValue) throws Exception {
    // Foreign key relationship to the object
    XmlObject me = fromTemplateElement(lookupField, null, false);
    me.addField("type", type);
    me.addField(extIdName, extIdValue);
    return me;
  }

  /** Create a bare XmlObject with the given name, optionally carrying a value. */
  public static XmlObject fromTemplateElement(String name, Object value, boolean setValue)
      throws SOAPException {
    // Use the TEMPLATE org.w3c.dom.Element to create new Message Elements
    XmlObject me = new XmlObject();
    if (setValue) {
      me.setValue(value);
    }
    me.setName(new QName(name));
    return me;
  }

  /**
   * Return the child elements of a record, skipping the reserved partner-namespace fields
   * ("type", "fieldsToNull").
   *
   * @return the children, or {@code null} when the record is null or has no (non-reserved)
   *     children — callers rely on the null return
   */
  public static XmlObject[] getChildren(SObject object) {
    List<String> reservedFieldNames = Arrays.asList("type", "fieldsToNull");
    if (object == null) {
      return null;
    }
    List<XmlObject> children = new ArrayList<>();
    Iterator<XmlObject> iterator = object.getChildren();
    while (iterator.hasNext()) {
      XmlObject child = iterator.next();
      if (child.getName().getNamespaceURI().equals(Constants.PARTNER_SOBJECT_NS)
          && reservedFieldNames.contains(child.getName().getLocalPart())) {
        continue;
      }
      children.add(child);
    }
    if (children.isEmpty()) {
      return null;
    }
    return children.toArray(new XmlObject[0]);
  }
}
googleapis/google-cloud-java
35,468
java-dialogflow/google-cloud-dialogflow/src/main/java/com/google/cloud/dialogflow/v2/stub/IntentsStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.v2.stub; import static com.google.cloud.dialogflow.v2.IntentsClient.ListIntentsPagedResponse; import static com.google.cloud.dialogflow.v2.IntentsClient.ListLocationsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; 
import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dialogflow.v2.BatchDeleteIntentsRequest; import com.google.cloud.dialogflow.v2.BatchUpdateIntentsRequest; import com.google.cloud.dialogflow.v2.BatchUpdateIntentsResponse; import com.google.cloud.dialogflow.v2.CreateIntentRequest; import com.google.cloud.dialogflow.v2.DeleteIntentRequest; import com.google.cloud.dialogflow.v2.GetIntentRequest; import com.google.cloud.dialogflow.v2.Intent; import com.google.cloud.dialogflow.v2.ListIntentsRequest; import com.google.cloud.dialogflow.v2.ListIntentsResponse; import com.google.cloud.dialogflow.v2.UpdateIntentRequest; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import com.google.protobuf.Struct; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link IntentsStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (dialogflow.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. 
* <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getIntent: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder(); * intentsSettingsBuilder * .getIntentSettings() * .setRetrySettings( * intentsSettingsBuilder * .getIntentSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * IntentsStubSettings intentsSettings = intentsSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. 
For example, to * configure the RetrySettings for batchUpdateIntents: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * intentsSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @Generated("by gapic-generator-java") public class IntentsStubSettings extends StubSettings<IntentsStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/cloud-platform") .add("https://www.googleapis.com/auth/dialogflow") .build(); private final PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings; private final UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings; private final UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings; private final UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings; private final UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings; private final UnaryCallSettings<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings; private final OperationCallSettings<BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings; private final UnaryCallSettings<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings; private final OperationCallSettings<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings; private final PagedCallSettings< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings; private static final PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent> LIST_INTENTS_PAGE_STR_DESC = new PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent>() { @Override public String emptyToken() { return ""; } @Override public ListIntentsRequest injectToken(ListIntentsRequest payload, String token) { return ListIntentsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListIntentsRequest injectPageSize(ListIntentsRequest payload, int pageSize) { return ListIntentsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListIntentsRequest payload) { return 
payload.getPageSize(); } @Override public String extractNextToken(ListIntentsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Intent> extractResources(ListIntentsResponse payload) { return payload.getIntentsList(); } }; private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location> LIST_LOCATIONS_PAGE_STR_DESC = new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() { @Override public String emptyToken() { return ""; } @Override public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) { return ListLocationsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) { return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListLocationsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListLocationsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Location> extractResources(ListLocationsResponse payload) { return payload.getLocationsList(); } }; private static final PagedListResponseFactory< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> LIST_INTENTS_PAGE_STR_FACT = new PagedListResponseFactory< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>() { @Override public ApiFuture<ListIntentsPagedResponse> getFuturePagedResponse( UnaryCallable<ListIntentsRequest, ListIntentsResponse> callable, ListIntentsRequest request, ApiCallContext context, ApiFuture<ListIntentsResponse> futureResponse) { PageContext<ListIntentsRequest, ListIntentsResponse, Intent> pageContext = PageContext.create(callable, LIST_INTENTS_PAGE_STR_DESC, request, context); return ListIntentsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListLocationsRequest, 
ListLocationsResponse, ListLocationsPagedResponse> LIST_LOCATIONS_PAGE_STR_FACT = new PagedListResponseFactory< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() { @Override public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse( UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable, ListLocationsRequest request, ApiCallContext context, ApiFuture<ListLocationsResponse> futureResponse) { PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext = PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context); return ListLocationsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to listIntents. */ public PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings() { return listIntentsSettings; } /** Returns the object with the settings used for calls to getIntent. */ public UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings() { return getIntentSettings; } /** Returns the object with the settings used for calls to createIntent. */ public UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings() { return createIntentSettings; } /** Returns the object with the settings used for calls to updateIntent. */ public UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings() { return updateIntentSettings; } /** Returns the object with the settings used for calls to deleteIntent. */ public UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings() { return deleteIntentSettings; } /** Returns the object with the settings used for calls to batchUpdateIntents. */ public UnaryCallSettings<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings() { return batchUpdateIntentsSettings; } /** Returns the object with the settings used for calls to batchUpdateIntents. 
*/ public OperationCallSettings<BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings() { return batchUpdateIntentsOperationSettings; } /** Returns the object with the settings used for calls to batchDeleteIntents. */ public UnaryCallSettings<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings() { return batchDeleteIntentsSettings; } /** Returns the object with the settings used for calls to batchDeleteIntents. */ public OperationCallSettings<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings() { return batchDeleteIntentsOperationSettings; } /** Returns the object with the settings used for calls to listLocations. */ public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the object with the settings used for calls to getLocation. */ public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } public IntentsStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcIntentsStub.create(this); } if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonIntentsStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "dialogflow"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. 
*/ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "dialogflow.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "dialogflow.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } /** Returns a builder for the default REST ChannelProvider for this service. 
*/ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return IntentsStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. */ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected IntentsStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); listIntentsSettings = settingsBuilder.listIntentsSettings().build(); getIntentSettings = settingsBuilder.getIntentSettings().build(); createIntentSettings = settingsBuilder.createIntentSettings().build(); updateIntentSettings = settingsBuilder.updateIntentSettings().build(); deleteIntentSettings = settingsBuilder.deleteIntentSettings().build(); batchUpdateIntentsSettings = settingsBuilder.batchUpdateIntentsSettings().build(); batchUpdateIntentsOperationSettings = settingsBuilder.batchUpdateIntentsOperationSettings().build(); batchDeleteIntentsSettings = settingsBuilder.batchDeleteIntentsSettings().build(); batchDeleteIntentsOperationSettings = settingsBuilder.batchDeleteIntentsOperationSettings().build(); listLocationsSettings = settingsBuilder.listLocationsSettings().build(); getLocationSettings = settingsBuilder.getLocationSettings().build(); } /** Builder for IntentsStubSettings. 
*/ public static class Builder extends StubSettings.Builder<IntentsStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final PagedCallSettings.Builder< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings; private final UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings; private final UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings; private final UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings; private final UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings; private final UnaryCallSettings.Builder<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings; private final OperationCallSettings.Builder< BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings; private final UnaryCallSettings.Builder<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings; private final OperationCallSettings.Builder<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings; private final PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = 
RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(60000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("retry_policy_0_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); listIntentsSettings = PagedCallSettings.newBuilder(LIST_INTENTS_PAGE_STR_FACT); getIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updateIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); batchUpdateIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); batchUpdateIntentsOperationSettings = OperationCallSettings.newBuilder(); batchDeleteIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); batchDeleteIntentsOperationSettings = OperationCallSettings.newBuilder(); listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT); getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listIntentsSettings, getIntentSettings, createIntentSettings, updateIntentSettings, deleteIntentSettings, batchUpdateIntentsSettings, batchDeleteIntentsSettings, listLocationsSettings, getLocationSettings); initDefaults(this); } protected Builder(IntentsStubSettings settings) { super(settings); listIntentsSettings = settings.listIntentsSettings.toBuilder(); getIntentSettings = settings.getIntentSettings.toBuilder(); createIntentSettings = settings.createIntentSettings.toBuilder(); 
updateIntentSettings = settings.updateIntentSettings.toBuilder(); deleteIntentSettings = settings.deleteIntentSettings.toBuilder(); batchUpdateIntentsSettings = settings.batchUpdateIntentsSettings.toBuilder(); batchUpdateIntentsOperationSettings = settings.batchUpdateIntentsOperationSettings.toBuilder(); batchDeleteIntentsSettings = settings.batchDeleteIntentsSettings.toBuilder(); batchDeleteIntentsOperationSettings = settings.batchDeleteIntentsOperationSettings.toBuilder(); listLocationsSettings = settings.listLocationsSettings.toBuilder(); getLocationSettings = settings.getLocationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listIntentsSettings, getIntentSettings, createIntentSettings, updateIntentSettings, deleteIntentSettings, batchUpdateIntentsSettings, batchDeleteIntentsSettings, listLocationsSettings, getLocationSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder createHttpJsonDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .listIntentsSettings() 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .createIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .updateIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .deleteIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .batchUpdateIntentsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .batchDeleteIntentsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .listLocationsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getLocationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .batchUpdateIntentsOperationSettings() .setInitialCallSettings( UnaryCallSettings .<BatchUpdateIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")) .build()) .setResponseTransformer( 
ProtoOperationTransformers.ResponseTransformer.create( BatchUpdateIntentsResponse.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Struct.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); builder .batchDeleteIntentsOperationSettings() .setInitialCallSettings( UnaryCallSettings .<BatchDeleteIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Empty.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Struct.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. 
*/ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to listIntents. */ public PagedCallSettings.Builder< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings() { return listIntentsSettings; } /** Returns the builder for the settings used for calls to getIntent. */ public UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings() { return getIntentSettings; } /** Returns the builder for the settings used for calls to createIntent. */ public UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings() { return createIntentSettings; } /** Returns the builder for the settings used for calls to updateIntent. */ public UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings() { return updateIntentSettings; } /** Returns the builder for the settings used for calls to deleteIntent. */ public UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings() { return deleteIntentSettings; } /** Returns the builder for the settings used for calls to batchUpdateIntents. */ public UnaryCallSettings.Builder<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings() { return batchUpdateIntentsSettings; } /** Returns the builder for the settings used for calls to batchUpdateIntents. */ public OperationCallSettings.Builder< BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings() { return batchUpdateIntentsOperationSettings; } /** Returns the builder for the settings used for calls to batchDeleteIntents. 
*/ public UnaryCallSettings.Builder<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings() { return batchDeleteIntentsSettings; } /** Returns the builder for the settings used for calls to batchDeleteIntents. */ public OperationCallSettings.Builder<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings() { return batchDeleteIntentsOperationSettings; } /** Returns the builder for the settings used for calls to listLocations. */ public PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the builder for the settings used for calls to getLocation. */ public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } @Override public IntentsStubSettings build() throws IOException { return new IntentsStubSettings(this); } } }
openjdk/jdk8
35,429
hotspot/agent/src/share/classes/sun/jvm/hotspot/debugger/windbg/WindbgCDebugInfoBuilder.java
/* * Copyright (c) 2002, 2003, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
* */ package sun.jvm.hotspot.debugger.windbg; import java.util.*; import sun.jvm.hotspot.debugger.*; import sun.jvm.hotspot.debugger.win32.coff.*; import sun.jvm.hotspot.debugger.cdbg.*; import sun.jvm.hotspot.debugger.cdbg.basic.*; import sun.jvm.hotspot.utilities.Assert; class WindbgCDebugInfoBuilder implements DebugVC50SubsectionTypes, DebugVC50TypeLeafIndices, DebugVC50TypeEnums, DebugVC50SymbolTypes, DebugVC50MemberAttributes, CVAttributes, AccessControl { private WindbgDebugger dbg; private Address base; private DebugVC50 vc50; private BasicCDebugInfoDataBase db; private DebugVC50TypeIterator iter; private DebugVC50SymbolIterator symIter; // Logical->physical segment mapping private COFFFile file; private DebugVC50SSSegMap segMap; // Canonicalization of primitive types private Map primIndexToTypeMap; // Global unnamed enumeration // (FIXME: must figure out how to handle nested type descriptions) private BasicEnumType unnamedEnum; private Stack blockStack; private int endsToSkip; private static final int POINTER_SIZE = 4; WindbgCDebugInfoBuilder(WindbgDebugger dbg) { this.dbg = dbg; } CDebugInfoDataBase buildDataBase(String dllName, Address base) { this.base = base; file = COFFFileParser.getParser().parse(dllName); vc50 = getDebugVC50(file); if (vc50 == null) return null; segMap = getSegMap(); primIndexToTypeMap = new HashMap(); blockStack = new Stack(); endsToSkip = 0; db = new BasicCDebugInfoDataBase(); db.beginConstruction(); // Get global types and add them to the database DebugVC50SSGlobalTypes types = getGlobalTypes(); for (iter = types.getTypeIterator(); !iter.done(); iter.next()) { while (!iter.typeStringDone()) { switch (iter.typeStringLeaf()) { case LF_MODIFIER: { int idx = iter.getModifierIndex(); BasicType target = getTypeByIndex(idx); short windowsMods = iter.getModifierAttribute(); short mods = 0; if ((windowsMods & MODIFIER_CONST_MASK) != 0) mods |= CONST; if ((windowsMods & MODIFIER_VOLATILE_MASK) != 0) mods |= VOLATILE; 
putType(target.getCVVariant(mods)); break; } case LF_POINTER: { int idx = iter.getPointerType(); BasicType target = getTypeByIndex(idx); short windowsMods = iter.getModifierAttribute(); short mods = 0; if ((windowsMods & POINTER_CONST_MASK) != 0) mods |= CONST; if ((windowsMods & POINTER_VOLATILE_MASK) != 0) mods |= VOLATILE; BasicPointerType ptrType = new BasicPointerType(POINTER_SIZE, target); if (mods != 0) { ptrType = (BasicPointerType) ptrType.getCVVariant(mods); } putType(ptrType); break; } case LF_ARRAY: { BasicType elemType = getTypeByIndex(iter.getArrayElementType()); putType(new BasicArrayType(iter.getArrayName(), elemType, iter.getArrayLength())); break; } case LF_CLASS: case LF_STRUCTURE: { CompoundTypeKind kind = ((iter.typeStringLeaf() == LF_CLASS) ? CompoundTypeKind.CLASS : CompoundTypeKind.STRUCT); BasicCompoundType type = new BasicCompoundType(iter.getClassName(), iter.getClassSize(), kind); // Skip parsing of forward references to types // FIXME: do we have to resolve these later? 
if ((iter.getClassProperty() & PROPERTY_FWDREF) == 0) { DebugVC50TypeIterator fieldIter = iter.getClassFieldListIterator(); if (Assert.ASSERTS_ENABLED) { Assert.that(fieldIter.typeStringLeaf() == LF_FIELDLIST, "Expected field list"); } boolean advance = false; while (!fieldIter.typeStringDone()) { advance = true; switch (fieldIter.typeStringLeaf()) { case LF_FIELDLIST: break; case LF_BCLASS: { int accessControl = memberAttributeToAccessControl(fieldIter.getBClassAttribute()); Type baseType = getTypeByIndex(fieldIter.getBClassType()); // FIXME: take offset into account type.addBaseClass(new BasicBaseClass(accessControl, false, baseType)); break; } case LF_VBCLASS: { int accessControl = memberAttributeToAccessControl(fieldIter.getVBClassAttribute()); Type baseType = getTypeByIndex(fieldIter.getVBClassBaseClassType()); // FIXME: take offset and virtual base offset into account type.addBaseClass(new BasicBaseClass(accessControl, true, baseType)); break; } // I don't think we need to handle indirect virtual base // classes since they should be handled indirectly through // the modeling of the type hierarchy case LF_IVBCLASS: break; case LF_INDEX: { fieldIter = fieldIter.getIndexIterator(); advance = false; break; } case LF_MEMBER: { BasicField field = new BasicField(fieldIter.getMemberName(), getTypeByIndex(fieldIter.getMemberType()), memberAttributeToAccessControl(fieldIter.getMemberAttribute()), false); field.setOffset(fieldIter.getMemberOffset()); type.addField(field); break; } case LF_STMEMBER: { BasicField field = new BasicField(fieldIter.getStaticName(), getTypeByIndex(fieldIter.getStaticType()), memberAttributeToAccessControl(fieldIter.getStaticAttribute()), true); // The field's address will be found during resolution // of the debug info database type.addField(field); break; } // FIXME: handle methods case LF_METHOD: break; case LF_ONEMETHOD: break; // FIXME: handle nested types case LF_NESTTYPE: break; case LF_NESTTYPEEX: break; // NOTE: virtual functions not 
needed/handled yet for // this debugging system (because we are not planning to // handle calling methods in the target process at // runtime) case LF_VFUNCTAB: break; case LF_FRIENDCLS: break; case LF_VFUNCOFF: break; case LF_MEMBERMODIFY: break; case LF_PAD0: case LF_PAD1: case LF_PAD2: case LF_PAD3: case LF_PAD4: case LF_PAD5: case LF_PAD6: case LF_PAD7: case LF_PAD8: case LF_PAD9: case LF_PAD10: case LF_PAD11: case LF_PAD12: case LF_PAD13: case LF_PAD14: case LF_PAD15: break; default: System.err.println("WARNING: unexpected leaf index " + fieldIter.typeStringLeaf() + " in field list for type " + iter.getTypeIndex()); } if (advance) { fieldIter.typeStringNext(); } } } putType(type); break; } case LF_UNION: { BasicCompoundType type = new BasicCompoundType(iter.getUnionName(), iter.getUnionSize(), CompoundTypeKind.UNION); // Skip parsing of forward references to types // FIXME: do we have to resolve these later? if ((iter.getClassProperty() & PROPERTY_FWDREF) == 0) { DebugVC50TypeIterator fieldIter = iter.getUnionFieldListIterator(); if (Assert.ASSERTS_ENABLED) { Assert.that(fieldIter.typeStringLeaf() == LF_FIELDLIST, "Expected field list"); } boolean advance = false; while (!fieldIter.typeStringDone()) { advance = true; switch (fieldIter.typeStringLeaf()) { case LF_FIELDLIST: break; case LF_BCLASS: break; case LF_VBCLASS: break; case LF_IVBCLASS: break; case LF_INDEX: { fieldIter = fieldIter.getIndexIterator(); advance = false; break; } case LF_MEMBER: { BasicField field = new BasicField(fieldIter.getMemberName(), getTypeByIndex(fieldIter.getMemberType()), memberAttributeToAccessControl(fieldIter.getMemberAttribute()), false); field.setOffset(fieldIter.getMemberOffset()); type.addField(field); break; } case LF_STMEMBER: { System.err.println("WARNING: I didn't think unions could contain static fields..."); BasicField field = new BasicField(fieldIter.getStaticName(), getTypeByIndex(fieldIter.getStaticType()), 
memberAttributeToAccessControl(fieldIter.getStaticAttribute()), true); // The field's address will be found during resolution // of the debug info database type.addField(field); break; } case LF_METHOD: break; case LF_ONEMETHOD: break; // FIXME: handle nested types case LF_NESTTYPE: break; case LF_NESTTYPEEX: break; case LF_VFUNCTAB: break; case LF_FRIENDCLS: break; case LF_VFUNCOFF: break; case LF_MEMBERMODIFY: break; case LF_PAD0: case LF_PAD1: case LF_PAD2: case LF_PAD3: case LF_PAD4: case LF_PAD5: case LF_PAD6: case LF_PAD7: case LF_PAD8: case LF_PAD9: case LF_PAD10: case LF_PAD11: case LF_PAD12: case LF_PAD13: case LF_PAD14: case LF_PAD15: break; default: System.err.println("WARNING: unexpected leaf index " + fieldIter.typeStringLeaf() + " in field list for union of type " + iter.getTypeIndex()); } if (advance) { fieldIter.typeStringNext(); } } } putType(type); break; } case LF_ENUM: { String name = iter.getEnumName(); BasicEnumType enumType = null; if ((name == null) || (name.equals(""))) { if (unnamedEnum == null) { unnamedEnum = new BasicEnumType(null, getTypeByIndex(iter.getEnumType())); } enumType = unnamedEnum; } else { enumType = new BasicEnumType(name, getTypeByIndex(iter.getEnumType())); } DebugVC50TypeIterator fieldIter = iter.getEnumFieldListIterator(); if (Assert.ASSERTS_ENABLED) { Assert.that(fieldIter.typeStringLeaf() == LF_FIELDLIST, "Expected field list"); } boolean advance = false; while (!fieldIter.typeStringDone()) { advance = true; switch (fieldIter.typeStringLeaf()) { case LF_FIELDLIST: break; case LF_ENUMERATE: { String enumName = fieldIter.getEnumerateName(); long enumVal = fieldIter.getEnumerateValue(); enumType.addEnum(enumName, enumVal); break; } case LF_INDEX: { fieldIter = fieldIter.getIndexIterator(); advance = false; break; } case LF_PAD0: case LF_PAD1: case LF_PAD2: case LF_PAD3: case LF_PAD4: case LF_PAD5: case LF_PAD6: case LF_PAD7: case LF_PAD8: case LF_PAD9: case LF_PAD10: case LF_PAD11: case LF_PAD12: case LF_PAD13: case 
LF_PAD14: case LF_PAD15: break; default: System.err.println("WARNING: unexpected leaf index " + fieldIter.typeStringLeaf() + " in field list for enum of type " + iter.getTypeIndex()); } if (advance) { fieldIter.typeStringNext(); } } putType(enumType); break; } case LF_PROCEDURE: { Type retType = getTypeByIndex(iter.getProcedureReturnType()); BasicFunctionType func = new BasicFunctionType(null, POINTER_SIZE, retType); DebugVC50TypeIterator argIter = iter.getProcedureArgumentListIterator(); if (Assert.ASSERTS_ENABLED) { Assert.that(argIter.typeStringLeaf() == LF_ARGLIST, "Expected argument list"); } for (int i = 0; i < argIter.getArgListCount(); i++) { func.addArgumentType(getTypeByIndex(argIter.getArgListType(i))); } putType(func); break; } case LF_MFUNCTION: { Type retType = getTypeByIndex(iter.getMFunctionReturnType()); Type container = getTypeByIndex(iter.getMFunctionContainingClass()); Type thisType = getTypeByIndex(iter.getMFunctionThis()); long thisAdjust = iter.getMFunctionThisAdjust(); BasicMemberFunctionType func = new BasicMemberFunctionType(null, POINTER_SIZE, retType, container, thisType, thisAdjust); DebugVC50TypeIterator argIter = iter.getMFunctionArgumentListIterator(); for (int i = 0; i < argIter.getArgListCount(); i++) { func.addArgumentType(getTypeByIndex(argIter.getArgListType(i))); } putType(func); break; } // FIXME: handle virtual function table shape description case LF_VTSHAPE: break; case LF_BARRAY: System.err.println("FIXME: don't know what to do with LF_BARRAY leaves (convert to pointers?"); break; case LF_LABEL: break; case LF_NULL: break; // FIXME: do we need to handle this? With what? 
case LF_DIMARRAY: System.err.println("FIXME: don't know what to do with LF_DIMARRAY leaves yet"); break; case LF_VFTPATH: break; case LF_PRECOMP: break; case LF_ENDPRECOMP: break; case LF_OEM: break; case LF_TYPESERVER: break; // Type records referenced from other type records case LF_SKIP: break; case LF_ARGLIST: skipTypeRecord(); break; case LF_DEFARG: System.err.println("FIXME: handle default arguments (dereference the type)"); break; case LF_FIELDLIST: skipTypeRecord(); break; case LF_DERIVED: break; case LF_BITFIELD: { Type underlyingType = getTypeByIndex(iter.getBitfieldFieldType()); BasicBitType bit = new BasicBitType(underlyingType, (iter.getBitfieldLength() & 0xFF), (iter.getBitfieldPosition() & 0xFF)); putType(bit); break; } case LF_METHODLIST: break; case LF_DIMCONU: case LF_DIMCONLU: case LF_DIMVARU: case LF_DIMVARLU: break; case LF_REFSYM: break; case LF_PAD0: case LF_PAD1: case LF_PAD2: case LF_PAD3: case LF_PAD4: case LF_PAD5: case LF_PAD6: case LF_PAD7: case LF_PAD8: case LF_PAD9: case LF_PAD10: case LF_PAD11: case LF_PAD12: case LF_PAD13: case LF_PAD14: case LF_PAD15: break; default: { System.err.println("Unexpected leaf index " + iter.typeStringLeaf() + " at offset 0x" + Integer.toHexString(iter.typeStringOffset())); break; } } if (!iter.typeStringDone()) { iter.typeStringNext(); } } } // Add all symbol directories to debug info // (FIXME: must figure out how to handle module-by-module // arrangement of at least the static symbols to have proper // lookup -- should probably also take advantage of the PROCREF // and UDT references to understand how to build the global // database vs. 
the module-by-module one) DebugVC50SubsectionDirectory dir = vc50.getSubsectionDirectory(); int moduleNumber = 0; // Debugging for (int i = 0; i < dir.getNumEntries(); i++) { DebugVC50Subsection ss = dir.getSubsection(i); int ssType = ss.getSubsectionType(); boolean process = false; if ((ssType == SST_GLOBAL_SYM) || (ssType == SST_GLOBAL_PUB) || (ssType == SST_STATIC_SYM)) { DebugVC50SSSymbolBase syms = (DebugVC50SSSymbolBase) ss; symIter = syms.getSymbolIterator(); process = true; } if (ssType == SST_ALIGN_SYM) { DebugVC50SSAlignSym syms = (DebugVC50SSAlignSym) ss; symIter = syms.getSymbolIterator(); process = true; } if (process) { for (; !symIter.done(); symIter.next()) { switch (symIter.getType()) { case S_COMPILE: break; case S_SSEARCH: break; // FIXME: may need this later case S_END: { try { // FIXME: workaround for warnings until we figure out // what to do with THUNK32 symbols if (endsToSkip == 0) { blockStack.pop(); } else { --endsToSkip; } } catch (EmptyStackException e) { System.err.println("WARNING: mismatched block begins/ends in debug information"); } break; } case S_SKIP: break; case S_CVRESERVE: break; case S_OBJNAME: break; // FIXME: may need this later case S_ENDARG: break; case S_COBOLUDT: break; case S_MANYREG: break; // FIXME: may need to add support for this case S_RETURN: break; // NOTE: would need this if adding support for calling functions case S_ENTRYTHIS: break; // FIXME: may need to add support for this case S_REGISTER: break; // FIXME: may need to add support for this case S_CONSTANT: break; // FIXME: will need to add support for this case S_UDT: break; // FIXME: need to see how these are used; are // they redundant, or are they used to describe // global variables as opposed to types? 
case S_COBOLUDT2: break; case S_MANYREG2: break; case S_BPREL32: { LocalSym sym = new BasicLocalSym(symIter.getBPRelName(), getTypeByIndex(symIter.getBPRelType()), symIter.getBPRelOffset()); addLocalToCurBlock(sym); break; } case S_LDATA32: case S_GDATA32: { // FIXME: must handle these separately from global data (have // module scoping and only add these at the module level) boolean isModuleLocal = (symIter.getType() == S_LDATA32); GlobalSym sym = new BasicGlobalSym(symIter.getLGDataName(), getTypeByIndex(symIter.getLGDataType()), newAddress(symIter.getLGDataOffset(), symIter.getLGDataSegment()), isModuleLocal); // FIXME: must handle module-local symbols differently addGlobalSym(sym); break; } case S_PUB32: break; // FIXME: figure out how these differ from // above and how they are used case S_LPROC32: case S_GPROC32: { BasicFunctionSym sym = new BasicFunctionSym(newLazyBlockSym(symIter.getLGProcParentOffset()), symIter.getLGProcLength(), newAddress(symIter.getLGProcOffset(), symIter.getLGProcSegment()), symIter.getLGProcName(), getTypeByIndex(symIter.getLGProcType()), (symIter.getType() == S_LPROC32)); // FIXME: have to handle local procedures differently (have // notion of modules and only add those procedures to the // module they are defined in) addBlock(sym); break; } case S_THUNK32: { // FIXME: see whether we need to handle these skipEnd(); break; } case S_BLOCK32: { BasicBlockSym sym = new BasicBlockSym(newLazyBlockSym(symIter.getBlockParentOffset()), symIter.getBlockLength(), newAddress(symIter.getBlockOffset(), symIter.getBlockSegment()), symIter.getBlockName()); addBlock(sym); break; } case S_WITH32: break; case S_LABEL32: break; case S_CEXMODEL32: break; case S_VFTTABLE32: break; // FIXME: may need to handle this // (most likely for run-time type determination) case S_REGREL32: break; // FIXME: may need to add support for this case S_LTHREAD32: break; case S_GTHREAD32: break; // FIXME: may need to add support for these case S_PROCREF: break; case 
S_DATAREF: break; case S_ALIGN: break; default: // These two unknown symbol types show up very frequently. // Symbol type 0 appears to always be a no-op symbol of // length 2 (i.e., length just covers the symbol type.) // Symbol type 4115 appears to be a copyright notice for // the Microsoft linker. if ((symIter.getType() != 0) && (symIter.getType() != 4115)) { System.err.println(" NOTE: Unexpected symbol of type " + symIter.getType() + " at offset 0x" + Integer.toHexString(symIter.getOffset())); } break; } } } } // Add line number information for all modules for (int i = 0; i < dir.getNumEntries(); i++) { DebugVC50Subsection ss = dir.getSubsection(i); if (ss.getSubsectionType() == SST_SRC_MODULE) { DebugVC50SSSrcModule srcMod = (DebugVC50SSSrcModule) ss; for (int sf = 0; sf < srcMod.getNumSourceFiles(); sf++) { DebugVC50SrcModFileDesc desc = srcMod.getSourceFileDesc(sf); // Uniquify these to save space String name = desc.getSourceFileName().intern(); for (int cs = 0; cs < desc.getNumCodeSegments(); cs++) { DebugVC50SrcModLineNumberMap map = desc.getLineNumberMap(cs); SectionHeader seg = file.getHeader().getSectionHeader(map.getSegment()); for (int lp = 0; lp < map.getNumSourceLinePairs(); lp++) { Address startPC = base.addOffsetTo(seg.getVirtualAddress() + map.getCodeOffset(lp)); // Fake address for endPC -- will be filled in by BasicLineNumberMapping Address endPC = base.addOffsetTo(seg.getSize()); db.addLineNumberInfo(new BasicLineNumberInfo(name, map.getLineNumber(lp), startPC, endPC)); } } } } } // Finish assembly of database db.resolve(new ResolveListener() { public void resolveFailed(Type containingType, LazyType failedResolve, String detail) { System.err.println("WARNING: failed to resolve type of index " + ((Integer) failedResolve.getKey()).intValue() + " in type " + containingType.getName() + " (class " + containingType.getClass().getName() + ") while " + detail); } public void resolveFailed(Type containingType, String staticFieldName) { 
System.err.println("WARNING: failed to resolve address of static field \"" + staticFieldName + "\" in type " + containingType.getName()); } public void resolveFailed(Sym containingSymbol, LazyType failedResolve, String detail) { System.err.println("WARNING: failed to resolve type of index " + ((Integer) failedResolve.getKey()).intValue() + " in symbol of type " + containingSymbol.getClass().getName() + " while " + detail); } public void resolveFailed(Sym containingSymbol, LazyBlockSym failedResolve, String detail) { System.err.println("WARNING: failed to resolve block at offset 0x" + Integer.toHexString(((Integer) failedResolve.getKey()).intValue()) + " in symbol of type " + containingSymbol.getClass().getName() + " while " + detail); } }); db.endConstruction(); return db; } //---------------------------------------------------------------------- // Internals only below this point // private static DebugVC50 getDebugVC50(COFFFile file) { COFFHeader header = file.getHeader(); OptionalHeader opt = header.getOptionalHeader(); if (opt == null) { // Optional header not found return null; } OptionalHeaderDataDirectories dd = opt.getDataDirectories(); if (dd == null) { // Optional header data directories not found return null; } DebugDirectory debug = dd.getDebugDirectory(); if (debug == null) { // Debug directory not found return null; } for (int i = 0; i < debug.getNumEntries(); i++) { DebugDirectoryEntry entry = debug.getEntry(i); if (entry.getType() == DebugTypes.IMAGE_DEBUG_TYPE_CODEVIEW) { return entry.getDebugVC50(); } } // CodeView information not found in debug directory return null; } private DebugVC50SSSegMap getSegMap() { return (DebugVC50SSSegMap) findSubsection(SST_SEG_MAP); } private DebugVC50SSGlobalTypes getGlobalTypes() { return (DebugVC50SSGlobalTypes) findSubsection(SST_GLOBAL_TYPES); } private DebugVC50SSGlobalSym getGlobalSymbols() { return (DebugVC50SSGlobalSym) findSubsection(SST_GLOBAL_SYM); } private DebugVC50Subsection findSubsection(short 
ssType) { DebugVC50SubsectionDirectory dir = vc50.getSubsectionDirectory(); for (int i = 0; i < dir.getNumEntries(); i++) { DebugVC50Subsection ss = dir.getSubsection(i); if (ss.getSubsectionType() == ssType) { return ss; } } throw new DebuggerException("Unable to find subsection of type " + ssType); } private void putType(Type t) { db.addType(new Integer(iter.getTypeIndex()), t); } private Address newAddress(int offset, short segment) { int seg = segment & 0xFFFF; // NOTE: it isn't clear how to use the segMap to map from logical // to physical segments. It seems it would make more sense if the // SegDescs contained a physical segment number in addition to the // offset within the physical segment of the logical one. // Get the section header corresponding to this segment SectionHeader section = file.getHeader().getSectionHeader(seg); // Result is relative to image base return base.addOffsetTo(section.getVirtualAddress() + offset); } private BasicType getTypeByIndex(int intIndex) { Integer index = new Integer(intIndex); // Handle primitive types here. 
if (intIndex <= 0x0FFF) { BasicType type = (BasicType) primIndexToTypeMap.get(index); if (type != null) { return type; } // Construct appropriate new primitive type int primMode = intIndex & RESERVED_MODE_MASK; if (primMode == RESERVED_MODE_DIRECT) { int primType = intIndex & RESERVED_TYPE_MASK; switch (primType) { case RESERVED_TYPE_SIGNED_INT: case RESERVED_TYPE_UNSIGNED_INT: { boolean unsigned = (primType == RESERVED_TYPE_UNSIGNED_INT); int size = 0; String name = null; switch (intIndex & RESERVED_SIZE_MASK) { case RESERVED_SIZE_INT_1_BYTE: size = 1; name = "char"; break; case RESERVED_SIZE_INT_2_BYTE: size = 2; name = "short"; break; case RESERVED_SIZE_INT_4_BYTE: size = 4; name = "int"; break; case RESERVED_SIZE_INT_8_BYTE: size = 8; name = "__int64"; break; default: throw new DebuggerException("Illegal size of integer type " + intIndex); } type = new BasicIntType(name, size, unsigned); break; } case RESERVED_TYPE_BOOLEAN: { int size = 0; switch (intIndex & RESERVED_SIZE_MASK) { case RESERVED_SIZE_INT_1_BYTE: size = 1; break; case RESERVED_SIZE_INT_2_BYTE: size = 2; break; case RESERVED_SIZE_INT_4_BYTE: size = 4; break; case RESERVED_SIZE_INT_8_BYTE: size = 8; break; default: throw new DebuggerException("Illegal size of boolean type " + intIndex); } type = new BasicIntType("bool", size, false); break; } case RESERVED_TYPE_REAL: { switch (intIndex & RESERVED_SIZE_MASK) { case RESERVED_SIZE_REAL_32_BIT: type = new BasicFloatType("float", 4); break; case RESERVED_SIZE_REAL_64_BIT: type = new BasicDoubleType("double", 8); break; default: throw new DebuggerException("Unsupported floating-point size in type " + intIndex); } break; } case RESERVED_TYPE_REALLY_INT: { switch (intIndex & RESERVED_SIZE_MASK) { case RESERVED_SIZE_REALLY_INT_CHAR: type = new BasicIntType("char", 1, false); break; case RESERVED_SIZE_REALLY_INT_WCHAR: type = new BasicIntType("wchar", 2, false); break; case RESERVED_SIZE_REALLY_INT_2_BYTE: type = new BasicIntType("short", 2, false); break; 
case RESERVED_SIZE_REALLY_INT_2_BYTE_U: type = new BasicIntType("short", 2, true); break; case RESERVED_SIZE_REALLY_INT_4_BYTE: type = new BasicIntType("int", 4, false); break; case RESERVED_SIZE_REALLY_INT_4_BYTE_U: type = new BasicIntType("int", 4, true); break; case RESERVED_SIZE_REALLY_INT_8_BYTE: type = new BasicIntType("__int64", 8, false); break; case RESERVED_SIZE_REALLY_INT_8_BYTE_U: type = new BasicIntType("__int64", 8, true); break; default: throw new DebuggerException("Illegal REALLY_INT size in type " + intIndex); } break; } case RESERVED_TYPE_SPECIAL: { switch (intIndex & RESERVED_SIZE_MASK) { case RESERVED_SIZE_SPECIAL_NO_TYPE: case RESERVED_SIZE_SPECIAL_VOID: type = new BasicVoidType(); break; default: throw new DebuggerException("Don't know how to handle reserved special type " + intIndex); } break; } default: throw new DebuggerException("Don't know how to handle reserved type " + intIndex); } } else { // Fold all pointer types together since we only support // flat-mode addressing anyway Type targetType = getTypeByIndex(intIndex & (~RESERVED_MODE_MASK)); type = new BasicPointerType(POINTER_SIZE, targetType); } if (Assert.ASSERTS_ENABLED) { Assert.that(type != null, "Got null Type for primitive type " + intIndex); } primIndexToTypeMap.put(index, type); return type; } // Not primitive type. Construct lazy reference to target type. // (Is it worth canonicalizing these as well to save space?) 
return new LazyType(index); } private void addBlock(BlockSym block) { db.addBlock(new Integer(symIter.getOffset()), block); blockStack.push(block); } private void skipEnd() { ++endsToSkip; } private BlockSym newLazyBlockSym(int offset) { if (offset == 0) { return null; } return new LazyBlockSym(new Integer(offset)); } private int memberAttributeToAccessControl(short memberAttribute) { int acc = memberAttribute & MEMATTR_ACCESS_MASK; switch (acc) { case MEMATTR_ACCESS_NO_PROTECTION: return NO_PROTECTION; case MEMATTR_ACCESS_PRIVATE: return PRIVATE; case MEMATTR_ACCESS_PROTECTED: return PROTECTED; case MEMATTR_ACCESS_PUBLIC: return PUBLIC; default: throw new RuntimeException("Should not reach here"); } } private void addLocalToCurBlock(LocalSym local) { ((BasicBlockSym) blockStack.peek()).addLocal(local); } private void addGlobalSym(GlobalSym sym) { db.addGlobalSym(sym); } private void skipTypeRecord() { while (!iter.typeStringDone()) { iter.typeStringNext(); } } }
apache/hadoop
35,280
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.hadoop.security; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_DNS_INTERFACE_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_DNS_NAMESERVER_KEY; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URI; import java.net.UnknownHostException; import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.ServiceLoader; import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; import javax.naming.ConfigurationException; import javax.security.auth.kerberos.KerberosPrincipal; import javax.security.auth.kerberos.KerberosTicket; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.Text; import org.apache.hadoop.net.DNS; 
import org.apache.hadoop.net.DomainNameResolver; import org.apache.hadoop.net.DomainNameResolverFactory; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenInfo; import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder; import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader; import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache; import org.apache.hadoop.util.StopWatch; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ZKUtil; import org.apache.zookeeper.client.ZKClientConfig; import org.apache.zookeeper.common.ClientX509Util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xbill.DNS.Name; import org.xbill.DNS.ResolverConfig; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses; /** * Security Utils. 
*/ @InterfaceAudience.Public @InterfaceStability.Evolving public final class SecurityUtil { public static final Logger LOG = LoggerFactory.getLogger(SecurityUtil.class); public static final String HOSTNAME_PATTERN = "_HOST"; public static final String FAILED_TO_GET_UGI_MSG_HEADER = "Failed to obtain user group information:"; private SecurityUtil() { } // controls whether buildTokenService will use an ip or host/ip as given // by the user @VisibleForTesting static boolean useIpForTokenService; @VisibleForTesting static HostResolver hostResolver; private static DomainNameResolver domainNameResolver; private static boolean logSlowLookups; private static int slowLookupThresholdMs; private static long cachingInterval = 0; static { setConfigurationInternal(new Configuration()); } @InterfaceAudience.Public @InterfaceStability.Evolving public static void setConfiguration(Configuration conf) { LOG.info("Updating Configuration"); setConfigurationInternal(conf); } private static void setConfigurationInternal(Configuration conf) { boolean useIp = conf.getBoolean( CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP, CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP_DEFAULT); cachingInterval = conf.getTimeDuration( CommonConfigurationKeys.HADOOP_SECURITY_HOSTNAME_CACHE_EXPIRE_INTERVAL_SECONDS, CommonConfigurationKeys.HADOOP_SECURITY_HOSTNAME_CACHE_EXPIRE_INTERVAL_SECONDS_DEFAULT, TimeUnit.SECONDS); setTokenServiceUseIp(useIp); logSlowLookups = conf.getBoolean( CommonConfigurationKeys .HADOOP_SECURITY_DNS_LOG_SLOW_LOOKUPS_ENABLED_KEY, CommonConfigurationKeys .HADOOP_SECURITY_DNS_LOG_SLOW_LOOKUPS_ENABLED_DEFAULT); slowLookupThresholdMs = conf.getInt( CommonConfigurationKeys .HADOOP_SECURITY_DNS_LOG_SLOW_LOOKUPS_THRESHOLD_MS_KEY, CommonConfigurationKeys .HADOOP_SECURITY_DNS_LOG_SLOW_LOOKUPS_THRESHOLD_MS_DEFAULT); domainNameResolver = DomainNameResolverFactory.newInstance(conf, CommonConfigurationKeys.HADOOP_SECURITY_RESOLVER_IMPL); } /** * For use only by tests 
and initialization. * * @param flag flag. */ @InterfaceAudience.Private @VisibleForTesting public static void setTokenServiceUseIp(boolean flag) { if (LOG.isDebugEnabled()) { LOG.debug("Setting " + CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP + " to " + flag); } useIpForTokenService = flag; hostResolver = !useIpForTokenService ? new QualifiedHostResolver(cachingInterval) : new StandardHostResolver(cachingInterval); } /** * TGS must have the server principal of the form "krbtgt/FOO@FOO". * @param principal * @return true or false */ static boolean isTGSPrincipal(KerberosPrincipal principal) { if (principal == null) return false; if (principal.getName().equals("krbtgt/" + principal.getRealm() + "@" + principal.getRealm())) { return true; } return false; } /** * Check whether the server principal is the TGS's principal * @param ticket the original TGT (the ticket that is obtained when a * kinit is done) * @return true or false */ protected static boolean isOriginalTGT(KerberosTicket ticket) { return isTGSPrincipal(ticket.getServer()); } /** * Convert Kerberos principal name pattern to valid Kerberos principal * names. It replaces hostname pattern with hostname, which should be * fully-qualified domain name. If hostname is null or "0.0.0.0", it uses * dynamically looked-up fqdn of the current host instead. 
* * @param principalConfig * the Kerberos principal name conf value to convert * @param hostname * the fully-qualified domain name used for substitution * @return converted Kerberos principal name * @throws IOException if the client address cannot be determined */ @InterfaceAudience.Public @InterfaceStability.Evolving public static String getServerPrincipal(String principalConfig, String hostname) throws IOException { String[] components = getComponents(principalConfig); if (components == null || components.length != 3 || !components[1].equals(HOSTNAME_PATTERN)) { return principalConfig; } else { return replacePattern(components, hostname); } } /** * Convert Kerberos principal name pattern to valid Kerberos principal names. * This method is similar to {@link #getServerPrincipal(String, String)}, * except 1) the reverse DNS lookup from addr to hostname is done only when * necessary, 2) param addr can't be null (no default behavior of using local * hostname when addr is null). * * @param principalConfig * Kerberos principal name pattern to convert * @param addr * InetAddress of the host used for substitution * @return converted Kerberos principal name * @throws IOException if the client address cannot be determined */ @InterfaceAudience.Public @InterfaceStability.Evolving public static String getServerPrincipal(String principalConfig, InetAddress addr) throws IOException { String[] components = getComponents(principalConfig); if (components == null || components.length != 3 || !components[1].equals(HOSTNAME_PATTERN)) { return principalConfig; } else { if (addr == null) { throw new IOException("Can't replace " + HOSTNAME_PATTERN + " pattern since client address is null"); } return replacePattern(components, domainNameResolver.getHostnameByIP(addr)); } } private static String[] getComponents(String principalConfig) { if (principalConfig == null) return null; return principalConfig.split("[/@]"); } private static String replacePattern(String[] components, String 
hostname) throws IOException { String fqdn = hostname; if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) { fqdn = getLocalHostName(null); } return components[0] + "/" + StringUtils.toLowerCase(fqdn) + "@" + components[2]; } /** * Retrieve the name of the current host. Multihomed hosts may restrict the * hostname lookup to a specific interface and nameserver with {@link * org.apache.hadoop.fs.CommonConfigurationKeysPublic#HADOOP_SECURITY_DNS_INTERFACE_KEY} * and {@link org.apache.hadoop.fs.CommonConfigurationKeysPublic#HADOOP_SECURITY_DNS_NAMESERVER_KEY} * * @param conf Configuration object. May be null. * @return * @throws UnknownHostException */ static String getLocalHostName(@Nullable Configuration conf) throws UnknownHostException { if (conf != null) { String dnsInterface = conf.get(HADOOP_SECURITY_DNS_INTERFACE_KEY); String nameServer = conf.get(HADOOP_SECURITY_DNS_NAMESERVER_KEY); if (dnsInterface != null) { return DNS.getDefaultHost(dnsInterface, nameServer, true); } else if (nameServer != null) { throw new IllegalArgumentException(HADOOP_SECURITY_DNS_NAMESERVER_KEY + " requires " + HADOOP_SECURITY_DNS_INTERFACE_KEY + ". Check your" + "configuration."); } } // Fallback to querying the default hostname as we did before. return InetAddress.getLocalHost().getCanonicalHostName(); } /** * Login as a principal specified in config. Substitute $host in * user's Kerberos principal name with a dynamically looked-up fully-qualified * domain name of the current host. 
* * @param conf * conf to use * @param keytabFileKey * the key to look for keytab file in conf * @param userNameKey * the key to look for user's Kerberos principal name in conf * @throws IOException if login fails */ @InterfaceAudience.Public @InterfaceStability.Evolving public static void login(final Configuration conf, final String keytabFileKey, final String userNameKey) throws IOException { login(conf, keytabFileKey, userNameKey, getLocalHostName(conf)); } /** * Login as a principal specified in config. Substitute $host in user's Kerberos principal * name with hostname. If non-secure mode - return. If no keytab available - * bail out with an exception * * @param conf * conf to use * @param keytabFileKey * the key to look for keytab file in conf * @param userNameKey * the key to look for user's Kerberos principal name in conf * @param hostname * hostname to use for substitution * @throws IOException if the config doesn't specify a keytab */ @InterfaceAudience.Public @InterfaceStability.Evolving public static void login(final Configuration conf, final String keytabFileKey, final String userNameKey, String hostname) throws IOException { if(! 
UserGroupInformation.isSecurityEnabled()) return; String keytabFilename = conf.get(keytabFileKey); if (keytabFilename == null || keytabFilename.length() == 0) { throw new IOException( "Running in secure mode, but config doesn't have a keytab for key: " + keytabFileKey); } String principalConfig = conf.get(userNameKey, System .getProperty("user.name")); String principalName = SecurityUtil.getServerPrincipal(principalConfig, hostname); UserGroupInformation.loginUserFromKeytab(principalName, keytabFilename); } /** * create the service name for a Delegation token * @param uri of the service * @param defPort is used if the uri lacks a port * @return the token service, or null if no authority * @see #buildTokenService(InetSocketAddress) */ public static String buildDTServiceName(URI uri, int defPort) { String authority = uri.getAuthority(); if (authority == null) { return null; } InetSocketAddress addr = NetUtils.createSocketAddr(authority, defPort); return buildTokenService(addr).toString(); } /** * Get the host name from the principal name of format {@literal <}service * {@literal >}/host@realm. * @param principalName principal name of format as described above * @return host name if the the string conforms to the above format, else null */ public static String getHostFromPrincipal(String principalName) { return new HadoopKerberosName(principalName).getHostName(); } private static ServiceLoader<SecurityInfo> securityInfoProviders = ServiceLoader.load(SecurityInfo.class); private static SecurityInfo[] testProviders = new SecurityInfo[0]; /** * Test setup method to register additional providers. * @param providers a list of high priority providers to use */ @InterfaceAudience.Private public static void setSecurityInfoProviders(SecurityInfo... providers) { testProviders = providers; } /** * Look up the KerberosInfo for a given protocol. It searches all known * SecurityInfo providers. 
* @param protocol the protocol class to get the information for * @param conf configuration object * @return the KerberosInfo or null if it has no KerberosInfo defined */ public static KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) { for(SecurityInfo provider: testProviders) { KerberosInfo result = provider.getKerberosInfo(protocol, conf); if (result != null) { return result; } } synchronized (securityInfoProviders) { for(SecurityInfo provider: securityInfoProviders) { KerberosInfo result = provider.getKerberosInfo(protocol, conf); if (result != null) { return result; } } } return null; } /** * Look up the client principal for a given protocol. It searches all known * SecurityInfo providers. * @param protocol the protocol class to get the information for * @param conf configuration object * @return client principal or null if it has no client principal defined. */ public static String getClientPrincipal(Class<?> protocol, Configuration conf) { String user = null; KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf); if (krbInfo != null) { String key = krbInfo.clientPrincipal(); user = (key != null && !key.isEmpty()) ? conf.get(key) : null; } return user; } /** * Look up the TokenInfo for a given protocol. It searches all known * SecurityInfo providers. * @param protocol The protocol class to get the information for. 
* @param conf Configuration object * @return the TokenInfo or null if it has no KerberosInfo defined */ public static TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) { for(SecurityInfo provider: testProviders) { TokenInfo result = provider.getTokenInfo(protocol, conf); if (result != null) { return result; } } synchronized (securityInfoProviders) { for(SecurityInfo provider: securityInfoProviders) { TokenInfo result = provider.getTokenInfo(protocol, conf); if (result != null) { return result; } } } return null; } /** * Decode the given token's service field into an InetAddress * @param token from which to obtain the service * @return InetAddress for the service */ public static InetSocketAddress getTokenServiceAddr(Token<?> token) { return NetUtils.createSocketAddr(token.getService().toString()); } /** * Set the given token's service to the format expected by the RPC client * @param token a delegation token * @param addr the socket for the rpc connection */ public static void setTokenService(Token<?> token, InetSocketAddress addr) { Text service = buildTokenService(addr); if (token != null) { token.setService(service); if (LOG.isDebugEnabled()) { LOG.debug("Acquired token "+token); // Token#toString() prints service } } else { LOG.warn("Failed to get token for service "+service); } } /** * Construct the service key for a token * @param addr InetSocketAddress of remote connection with a token * @return "ip:port" or "host:port" depending on the value of * hadoop.security.token.service.use_ip */ public static Text buildTokenService(InetSocketAddress addr) { String host = null; if (useIpForTokenService) { if (addr.isUnresolved()) { // host has no ip address throw new IllegalArgumentException( new UnknownHostException(addr.getHostName()) ); } host = addr.getAddress().getHostAddress(); } else { host = StringUtils.toLowerCase(addr.getHostName()); } return new Text(host + ":" + addr.getPort()); } /** * Construct the service key for a token * @param uri of 
remote connection with a token * @return "ip:port" or "host:port" depending on the value of * hadoop.security.token.service.use_ip */ public static Text buildTokenService(URI uri) { return buildTokenService(NetUtils.createSocketAddr(uri.getAuthority())); } /** * Perform the given action as the daemon's login user. If the login * user cannot be determined, this will log a FATAL error and exit * the whole JVM. * * @param action action. * @param <T> generic type T. * @return generic type T. */ public static <T> T doAsLoginUserOrFatal(PrivilegedAction<T> action) { if (UserGroupInformation.isSecurityEnabled()) { UserGroupInformation ugi = null; try { ugi = UserGroupInformation.getLoginUser(); } catch (IOException e) { LOG.error("Exception while getting login user", e); e.printStackTrace(); Runtime.getRuntime().exit(-1); } return ugi.doAs(action); } else { return action.run(); } } /** * Perform the given action as the daemon's login user. If an * InterruptedException is thrown, it is converted to an IOException. * * @param action the action to perform * @param <T> Generics Type T. * @return the result of the action * @throws IOException in the event of error */ public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action) throws IOException { return doAsUser(UserGroupInformation.getLoginUser(), action); } /** * Perform the given action as the daemon's current user. If an * InterruptedException is thrown, it is converted to an IOException. * * @param action the action to perform * @param <T> generic type T. 
* @return the result of the action * @throws IOException in the event of error */ public static <T> T doAsCurrentUser(PrivilegedExceptionAction<T> action) throws IOException { return doAsUser(UserGroupInformation.getCurrentUser(), action); } private static <T> T doAsUser(UserGroupInformation ugi, PrivilegedExceptionAction<T> action) throws IOException { try { return ugi.doAs(action); } catch (InterruptedException ie) { throw new IOException(ie); } } /** * Resolves a host subject to the security requirements determined by * hadoop.security.token.service.use_ip. Optionally logs slow resolutions. * * @param hostname host or ip to resolve * @return a resolved host * @throws UnknownHostException if the host doesn't exist */ @InterfaceAudience.Private public static InetAddress getByName(String hostname) throws UnknownHostException { if (logSlowLookups || LOG.isTraceEnabled()) { StopWatch lookupTimer = new StopWatch().start(); InetAddress result = hostResolver.getByName(hostname); long elapsedMs = lookupTimer.stop().now(TimeUnit.MILLISECONDS); if (elapsedMs >= slowLookupThresholdMs) { LOG.warn("Slow name lookup for " + hostname + ". 
Took " + elapsedMs + " ms."); } else if (LOG.isTraceEnabled()) { LOG.trace("Name lookup for " + hostname + " took " + elapsedMs + " ms."); } return result; } else { return hostResolver.getByName(hostname); } } interface HostResolver { InetAddress getByName(String host) throws UnknownHostException; } static abstract class CacheableHostResolver implements HostResolver { private volatile LoadingCache<String, InetAddress> cache; CacheableHostResolver(long expiryIntervalSecs) { if (expiryIntervalSecs > 0) { cache = CacheBuilder.newBuilder() .expireAfterWrite(expiryIntervalSecs, TimeUnit.SECONDS) .build(new CacheLoader<String, InetAddress>() { @Override public InetAddress load(String key) throws Exception { return resolve(key); } }); } } protected abstract InetAddress resolve(String host) throws UnknownHostException; @Override public InetAddress getByName(String host) throws UnknownHostException { if (cache != null) { try { return cache.get(host); } catch (Exception e) { Throwable cause = e.getCause(); if (cause instanceof UnknownHostException) { throw (UnknownHostException) cause; } String message = (cause != null ? cause.getMessage() : "Unknown error"); throw new UnknownHostException("Error resolving host " + host + ": " + message); } } else { return resolve(host); } } @VisibleForTesting public LoadingCache<String, InetAddress> getCache() { return cache; } } /** * Uses standard java host resolution */ static class StandardHostResolver extends CacheableHostResolver { StandardHostResolver(long expiryIntervalSecs) { super(expiryIntervalSecs); } @Override public InetAddress resolve(String host) throws UnknownHostException { return InetAddress.getByName(host); } } /** * This an alternate resolver with important properties that the standard * java resolver lacks: * 1) The hostname is fully qualified. This avoids security issues if not * all hosts in the cluster do not share the same search domains. It * also prevents other hosts from performing unnecessary dns searches. 
* In contrast, InetAddress simply returns the host as given. * 2) The InetAddress is instantiated with an exact host and IP to prevent * further unnecessary lookups. InetAddress may perform an unnecessary * reverse lookup for an IP. * 3) A call to getHostName() will always return the qualified hostname, or * more importantly, the IP if instantiated with an IP. This avoids * unnecessary dns timeouts if the host is not resolvable. * 4) Point 3 also ensures that if the host is re-resolved, ex. during a * connection re-attempt, that a reverse lookup to host and forward * lookup to IP is not performed since the reverse/forward mappings may * not always return the same IP. If the client initiated a connection * with an IP, then that IP is all that should ever be contacted. * * NOTE: this resolver is only used if: * hadoop.security.token.service.use_ip=false */ protected static class QualifiedHostResolver extends CacheableHostResolver { private List<String> searchDomains = new ArrayList<>(); { ResolverConfig resolverConfig = ResolverConfig.getCurrentConfig(); for (Name name : resolverConfig.searchPath()) { searchDomains.add(name.toString()); } } QualifiedHostResolver() { this(0); } QualifiedHostResolver(long expiryIntervalSecs) { super(expiryIntervalSecs); } /** * Create an InetAddress with a fully qualified hostname of the given * hostname. InetAddress does not qualify an incomplete hostname that * is resolved via the domain search list. * {@link InetAddress#getCanonicalHostName()} will fully qualify the * hostname, but it always return the A record whereas the given hostname * may be a CNAME. * * @param host a hostname or ip address * @return InetAddress with the fully qualified hostname or ip * @throws UnknownHostException if host does not exist */ @Override public InetAddress resolve(String host) throws UnknownHostException { InetAddress addr = null; if (InetAddresses.isInetAddress(host)) { // valid ip address. 
use it as-is addr = InetAddresses.forString(host); // set hostname addr = InetAddress.getByAddress(host, addr.getAddress()); } else if (host.endsWith(".")) { // a rooted host ends with a dot, ex. "host." // rooted hosts never use the search path, so only try an exact lookup addr = getByExactName(host); } else if (host.contains(".")) { // the host contains a dot (domain), ex. "host.domain" // try an exact host lookup, then fallback to search list addr = getByExactName(host); if (addr == null) { addr = getByNameWithSearch(host); } } else { // it's a simple host with no dots, ex. "host" // try the search list, then fallback to exact host InetAddress loopback = InetAddress.getByName(null); if (host.equalsIgnoreCase(loopback.getHostName())) { addr = InetAddress.getByAddress(host, loopback.getAddress()); } else { addr = getByNameWithSearch(host); if (addr == null) { addr = getByExactName(host); } } } // unresolvable! if (addr == null) { throw new UnknownHostException(host); } return addr; } InetAddress getByExactName(String host) { InetAddress addr = null; // InetAddress will use the search list unless the host is rooted // with a trailing dot. The trailing dot will disable any use of the // search path in a lower level resolver. See RFC 1535. String fqHost = host; if (!fqHost.endsWith(".")) fqHost += "."; try { addr = getInetAddressByName(fqHost); // can't leave the hostname as rooted or other parts of the system // malfunction, ex. kerberos principals are lacking proper host // equivalence for rooted/non-rooted hostnames addr = InetAddress.getByAddress(host, addr.getAddress()); } catch (UnknownHostException e) { // ignore, caller will throw if necessary } return addr; } InetAddress getByNameWithSearch(String host) { InetAddress addr = null; if (host.endsWith(".")) { // already qualified? addr = getByExactName(host); } else { for (String domain : searchDomains) { String dot = !domain.startsWith(".") ? "." 
: ""; addr = getByExactName(host + dot + domain); if (addr != null) break; } } return addr; } // implemented as a separate method to facilitate unit testing InetAddress getInetAddressByName(String host) throws UnknownHostException { return InetAddress.getByName(host); } void setSearchDomains(String ... domains) { searchDomains = Arrays.asList(domains); } } public static AuthenticationMethod getAuthenticationMethod(Configuration conf) { String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple"); try { return Enum.valueOf(AuthenticationMethod.class, StringUtils.toUpperCase(value)); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("Invalid attribute value for " + HADOOP_SECURITY_AUTHENTICATION + " of " + value); } } public static void setAuthenticationMethod( AuthenticationMethod authenticationMethod, Configuration conf) { if (authenticationMethod == null) { authenticationMethod = AuthenticationMethod.SIMPLE; } conf.set(HADOOP_SECURITY_AUTHENTICATION, StringUtils.toLowerCase(authenticationMethod.toString())); } /* * Check if a given port is privileged. * The ports with number smaller than 1024 are treated as privileged ports in * unix/linux system. For other operating systems, use this method with care. * For example, Windows doesn't have the concept of privileged ports. * However, it may be used at Windows client to check port of linux server. * * @param port the port number * @return true for privileged ports, false otherwise * */ public static boolean isPrivilegedPort(final int port) { return port < 1024; } /** * Utility method to fetch ZK auth info from the configuration. * * @param conf configuration. * @param configKey config key. * @throws java.io.IOException if the Zookeeper ACLs configuration file * cannot be read * @throws ZKUtil.BadAuthFormatException if the auth format is invalid * @return ZKAuthInfo List. 
*/ public static List<ZKUtil.ZKAuthInfo> getZKAuthInfos(Configuration conf, String configKey) throws IOException { char[] zkAuthChars = conf.getPassword(configKey); String zkAuthConf = zkAuthChars != null ? String.valueOf(zkAuthChars) : null; try { zkAuthConf = ZKUtil.resolveConfIndirection(zkAuthConf); if (zkAuthConf != null) { return ZKUtil.parseAuth(zkAuthConf); } else { return Collections.emptyList(); } } catch (IOException | ZKUtil.BadAuthFormatException e) { LOG.error("Couldn't read Auth based on {}", configKey); throw e; } } public static void validateSslConfiguration(TruststoreKeystore truststoreKeystore) throws ConfigurationException { if (org.apache.commons.lang3.StringUtils.isEmpty(truststoreKeystore.keystoreLocation)) { throw new ConfigurationException( "The keystore location parameter is empty for the ZooKeeper client connection."); } if (org.apache.commons.lang3.StringUtils.isEmpty(truststoreKeystore.keystorePassword)) { throw new ConfigurationException( "The keystore password parameter is empty for the ZooKeeper client connection."); } if (org.apache.commons.lang3.StringUtils.isEmpty(truststoreKeystore.truststoreLocation)) { throw new ConfigurationException( "The truststore location parameter is empty for the ZooKeeper client connection."); } if (org.apache.commons.lang3.StringUtils.isEmpty(truststoreKeystore.truststorePassword)) { throw new ConfigurationException( "The truststore password parameter is empty for the ZooKeeper client connection."); } } /** * Configure ZooKeeper Client with SSL/TLS connection. 
* @param zkClientConfig ZooKeeper Client configuration * @param truststoreKeystore truststore keystore, that we use to set the SSL configurations * @throws ConfigurationException if the SSL configs are empty */ public static void setSslConfiguration(ZKClientConfig zkClientConfig, TruststoreKeystore truststoreKeystore) throws ConfigurationException { setSslConfiguration(zkClientConfig, truststoreKeystore, new ClientX509Util()); } public static void setSslConfiguration(ZKClientConfig zkClientConfig, TruststoreKeystore truststoreKeystore, ClientX509Util x509Util) throws ConfigurationException { validateSslConfiguration(truststoreKeystore); LOG.info("Configuring the ZooKeeper client to use SSL/TLS encryption for connecting to the " + "ZooKeeper server."); LOG.debug("Configuring the ZooKeeper client with {} location: {}.", truststoreKeystore.keystoreLocation, CommonConfigurationKeys.ZK_SSL_KEYSTORE_LOCATION); LOG.debug("Configuring the ZooKeeper client with {} location: {}.", truststoreKeystore.truststoreLocation, CommonConfigurationKeys.ZK_SSL_TRUSTSTORE_LOCATION); zkClientConfig.setProperty(ZKClientConfig.SECURE_CLIENT, "true"); zkClientConfig.setProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET, "org.apache.zookeeper.ClientCnxnSocketNetty"); zkClientConfig.setProperty(x509Util.getSslKeystoreLocationProperty(), truststoreKeystore.keystoreLocation); zkClientConfig.setProperty(x509Util.getSslKeystorePasswdProperty(), truststoreKeystore.keystorePassword); zkClientConfig.setProperty(x509Util.getSslTruststoreLocationProperty(), truststoreKeystore.truststoreLocation); zkClientConfig.setProperty(x509Util.getSslTruststorePasswdProperty(), truststoreKeystore.truststorePassword); } /** * Helper class to contain the Truststore/Keystore paths for the ZK client connection over * SSL/TLS. 
*/ public static class TruststoreKeystore { private final String keystoreLocation; private final String keystorePassword; private final String truststoreLocation; private final String truststorePassword; /** * Configuration for the ZooKeeper connection when SSL/TLS is enabled. * When a value is not configured, ensure that empty string is set instead of null. * * @param conf ZooKeeper Client configuration */ public TruststoreKeystore(Configuration conf) { keystoreLocation = conf.get(CommonConfigurationKeys.ZK_SSL_KEYSTORE_LOCATION, ""); keystorePassword = conf.get(CommonConfigurationKeys.ZK_SSL_KEYSTORE_PASSWORD, ""); truststoreLocation = conf.get(CommonConfigurationKeys.ZK_SSL_TRUSTSTORE_LOCATION, ""); truststorePassword = conf.get(CommonConfigurationKeys.ZK_SSL_TRUSTSTORE_PASSWORD, ""); } public String getKeystoreLocation() { return keystoreLocation; } public String getKeystorePassword() { return keystorePassword; } public String getTruststoreLocation() { return truststoreLocation; } public String getTruststorePassword() { return truststorePassword; } } }
googleapis/google-cloud-java
35,138
java-cloudcommerceconsumerprocurement/proto-google-cloud-cloudcommerceconsumerprocurement-v1alpha1/src/main/java/com/google/cloud/commerce/consumer/procurement/v1alpha1/Subscription.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/commerce/consumer/procurement/v1alpha1/order.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.commerce.consumer.procurement.v1alpha1; /** * * * <pre> * Subscription information. * </pre> * * Protobuf type {@code google.cloud.commerce.consumer.procurement.v1alpha1.Subscription} */ public final class Subscription extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.commerce.consumer.procurement.v1alpha1.Subscription) SubscriptionOrBuilder { private static final long serialVersionUID = 0L; // Use Subscription.newBuilder() to construct. 
private Subscription(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Subscription() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Subscription(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.commerce.consumer.procurement.v1alpha1.OrderOuterClass .internal_static_google_cloud_commerce_consumer_procurement_v1alpha1_Subscription_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.commerce.consumer.procurement.v1alpha1.OrderOuterClass .internal_static_google_cloud_commerce_consumer_procurement_v1alpha1_Subscription_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription.class, com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription.Builder.class); } private int bitField0_; public static final int START_TIME_FIELD_NUMBER = 3; private com.google.protobuf.Timestamp startTime_; /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> * * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> * * @return The startTime. */ @java.lang.Override public com.google.protobuf.Timestamp getStartTime() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } /** * * * <pre> * The timestamp when the subscription begins, if applicable. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } public static final int END_TIME_FIELD_NUMBER = 1; private com.google.protobuf.Timestamp endTime_; /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> * * @return Whether the endTime field is set. */ @java.lang.Override public boolean hasEndTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> * * @return The endTime. */ @java.lang.Override public com.google.protobuf.Timestamp getEndTime() { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } public static final int AUTO_RENEWAL_ENABLED_FIELD_NUMBER = 2; private boolean autoRenewalEnabled_ = false; /** * * * <pre> * Whether auto renewal is enabled by user choice on current subscription. * This field indicates order/subscription status after pending plan change is * cancelled or rejected. * </pre> * * <code>bool auto_renewal_enabled = 2;</code> * * @return The autoRenewalEnabled. 
*/ @java.lang.Override public boolean getAutoRenewalEnabled() { return autoRenewalEnabled_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(1, getEndTime()); } if (autoRenewalEnabled_ != false) { output.writeBool(2, autoRenewalEnabled_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getStartTime()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEndTime()); } if (autoRenewalEnabled_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, autoRenewalEnabled_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getStartTime()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription)) { return super.equals(obj); } com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription other = (com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription) obj; if (hasStartTime() != other.hasStartTime()) return false; if (hasStartTime()) { if (!getStartTime().equals(other.getStartTime())) return false; } if (hasEndTime() != other.hasEndTime()) return false; if (hasEndTime()) { if (!getEndTime().equals(other.getEndTime())) return false; } if 
(getAutoRenewalEnabled() != other.getAutoRenewalEnabled()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + getStartTime().hashCode(); } if (hasEndTime()) { hash = (37 * hash) + END_TIME_FIELD_NUMBER; hash = (53 * hash) + getEndTime().hashCode(); } hash = (37 * hash) + AUTO_RENEWAL_ENABLED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAutoRenewalEnabled()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, 
input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Subscription information. * </pre> * * Protobuf type {@code google.cloud.commerce.consumer.procurement.v1alpha1.Subscription} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.commerce.consumer.procurement.v1alpha1.Subscription) com.google.cloud.commerce.consumer.procurement.v1alpha1.SubscriptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.commerce.consumer.procurement.v1alpha1.OrderOuterClass .internal_static_google_cloud_commerce_consumer_procurement_v1alpha1_Subscription_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.commerce.consumer.procurement.v1alpha1.OrderOuterClass .internal_static_google_cloud_commerce_consumer_procurement_v1alpha1_Subscription_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription.class, com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription.Builder.class); } // Construct using // com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStartTimeFieldBuilder(); getEndTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } endTime_ = null; if (endTimeBuilder_ != null) { endTimeBuilder_.dispose(); endTimeBuilder_ = null; } autoRenewalEnabled_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.commerce.consumer.procurement.v1alpha1.OrderOuterClass .internal_static_google_cloud_commerce_consumer_procurement_v1alpha1_Subscription_descriptor; } @java.lang.Override public com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription getDefaultInstanceForType() { return com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription .getDefaultInstance(); } @java.lang.Override public com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription build() { com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription buildPartial() { com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription result = new com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if 
(((from_bitField0_ & 0x00000001) != 0)) { result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.endTime_ = endTimeBuilder_ == null ? endTime_ : endTimeBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.autoRenewalEnabled_ = autoRenewalEnabled_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription) { return mergeFrom( (com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription other) { if (other == com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription .getDefaultInstance()) return this; if (other.hasStartTime()) { mergeStartTime(other.getStartTime()); } if (other.hasEndTime()) { 
mergeEndTime(other.getEndTime()); } if (other.getAutoRenewalEnabled() != false) { setAutoRenewalEnabled(other.getAutoRenewalEnabled()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getEndTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 10 case 16: { autoRenewalEnabled_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 16 case 26: { input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Timestamp startTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_; /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> * * @return Whether the startTime field is set. */ public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The timestamp when the subscription begins, if applicable. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> * * @return The startTime. */ public com.google.protobuf.Timestamp getStartTime() { if (startTimeBuilder_ == null) { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } else { return startTimeBuilder_.getMessage(); } } /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ public Builder setStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } startTime_ = value; } else { startTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (startTimeBuilder_ == null) { startTime_ = builderForValue.build(); } else { startTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ public Builder mergeStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && startTime_ != null && startTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getStartTimeBuilder().mergeFrom(value); } else { startTime_ = value; } } else { startTimeBuilder_.mergeFrom(value); } if (startTime_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The timestamp when the subscription begins, if applicable. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000001); startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getStartTimeFieldBuilder().getBuilder(); } /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { if (startTimeBuilder_ != null) { return startTimeBuilder_.getMessageOrBuilder(); } else { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } } /** * * * <pre> * The timestamp when the subscription begins, if applicable. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getStartTimeFieldBuilder() { if (startTimeBuilder_ == null) { startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getStartTime(), getParentForChildren(), isClean()); startTime_ = null; } return startTimeBuilder_; } private com.google.protobuf.Timestamp endTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> endTimeBuilder_; /** * * * <pre> * The timestamp when the subscription ends, if applicable. 
* </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> * * @return Whether the endTime field is set. */ public boolean hasEndTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> * * @return The endTime. */ public com.google.protobuf.Timestamp getEndTime() { if (endTimeBuilder_ == null) { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } else { return endTimeBuilder_.getMessage(); } } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ public Builder setEndTime(com.google.protobuf.Timestamp value) { if (endTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } endTime_ = value; } else { endTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (endTimeBuilder_ == null) { endTime_ = builderForValue.build(); } else { endTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The timestamp when the subscription ends, if applicable. 
* </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ public Builder mergeEndTime(com.google.protobuf.Timestamp value) { if (endTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && endTime_ != null && endTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getEndTimeBuilder().mergeFrom(value); } else { endTime_ = value; } } else { endTimeBuilder_.mergeFrom(value); } if (endTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ public Builder clearEndTime() { bitField0_ = (bitField0_ & ~0x00000002); endTime_ = null; if (endTimeBuilder_ != null) { endTimeBuilder_.dispose(); endTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEndTimeFieldBuilder().getBuilder(); } /** * * * <pre> * The timestamp when the subscription ends, if applicable. * </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { if (endTimeBuilder_ != null) { return endTimeBuilder_.getMessageOrBuilder(); } else { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } } /** * * * <pre> * The timestamp when the subscription ends, if applicable. 
* </pre> * * <code>.google.protobuf.Timestamp end_time = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getEndTimeFieldBuilder() { if (endTimeBuilder_ == null) { endTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getEndTime(), getParentForChildren(), isClean()); endTime_ = null; } return endTimeBuilder_; } private boolean autoRenewalEnabled_; /** * * * <pre> * Whether auto renewal is enabled by user choice on current subscription. * This field indicates order/subscription status after pending plan change is * cancelled or rejected. * </pre> * * <code>bool auto_renewal_enabled = 2;</code> * * @return The autoRenewalEnabled. */ @java.lang.Override public boolean getAutoRenewalEnabled() { return autoRenewalEnabled_; } /** * * * <pre> * Whether auto renewal is enabled by user choice on current subscription. * This field indicates order/subscription status after pending plan change is * cancelled or rejected. * </pre> * * <code>bool auto_renewal_enabled = 2;</code> * * @param value The autoRenewalEnabled to set. * @return This builder for chaining. */ public Builder setAutoRenewalEnabled(boolean value) { autoRenewalEnabled_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Whether auto renewal is enabled by user choice on current subscription. * This field indicates order/subscription status after pending plan change is * cancelled or rejected. * </pre> * * <code>bool auto_renewal_enabled = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearAutoRenewalEnabled() { bitField0_ = (bitField0_ & ~0x00000004); autoRenewalEnabled_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.commerce.consumer.procurement.v1alpha1.Subscription) } // @@protoc_insertion_point(class_scope:google.cloud.commerce.consumer.procurement.v1alpha1.Subscription) private static final com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription(); } public static com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Subscription> PARSER = new com.google.protobuf.AbstractParser<Subscription>() { @java.lang.Override public Subscription parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Subscription> parser() { 
return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Subscription> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * NOTE(review): the three raw lines that were here were stray dataset metadata
 * (a file-concatenation / extraction artifact), not Java source, and made this
 * file uncompilable. They identified the generated file that begins immediately
 * below; the information is preserved here inside a comment:
 *
 *   repo: googleapis/google-cloud-java
 *   size: 35,138
 *   path: java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/InterconnectApplicationAwareInterconnectBandwidthPercentage.java
 *
 * The content above this point is the tail of a different generated file
 * (com.google.cloud.commerce.consumer.procurement.v1alpha1.Subscription);
 * these two protoc-generated files should be split back into separate files
 * rather than edited by hand.
 */
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * Specify bandwidth percentages [1-100] for various traffic classes in BandwidthPercentagePolicy. The sum of all percentages must equal 100. All traffic classes must have a percentage value specified. * </pre> * * Protobuf type {@code * google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage} */ public final class InterconnectApplicationAwareInterconnectBandwidthPercentage extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage) InterconnectApplicationAwareInterconnectBandwidthPercentageOrBuilder { private static final long serialVersionUID = 0L; // Use InterconnectApplicationAwareInterconnectBandwidthPercentage.newBuilder() to construct. 
private InterconnectApplicationAwareInterconnectBandwidthPercentage( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InterconnectApplicationAwareInterconnectBandwidthPercentage() { trafficClass_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InterconnectApplicationAwareInterconnectBandwidthPercentage(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InterconnectApplicationAwareInterconnectBandwidthPercentage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InterconnectApplicationAwareInterconnectBandwidthPercentage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage .class, com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage .Builder.class); } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * </pre> * * Protobuf enum {@code * google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage.TrafficClass} */ public enum TrafficClass implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * A value indicating that the enum field is not set. * </pre> * * <code>UNDEFINED_TRAFFIC_CLASS = 0;</code> */ UNDEFINED_TRAFFIC_CLASS(0), /** * * * <pre> * Traffic Class 1, corresponding to DSCP ranges (0-7) 000xxx. * </pre> * * <code>TC1 = 82850;</code> */ TC1(82850), /** * * * <pre> * Traffic Class 2, corresponding to DSCP ranges (8-15) 001xxx. * </pre> * * <code>TC2 = 82851;</code> */ TC2(82851), /** * * * <pre> * Traffic Class 3, corresponding to DSCP ranges (16-23) 010xxx. 
* </pre> * * <code>TC3 = 82852;</code> */ TC3(82852), /** * * * <pre> * Traffic Class 4, corresponding to DSCP ranges (24-31) 011xxx. * </pre> * * <code>TC4 = 82853;</code> */ TC4(82853), /** * * * <pre> * Traffic Class 5, corresponding to DSCP ranges (32-47) 10xxxx. * </pre> * * <code>TC5 = 82854;</code> */ TC5(82854), /** * * * <pre> * Traffic Class 6, corresponding to DSCP ranges (48-63) 11xxxx. * </pre> * * <code>TC6 = 82855;</code> */ TC6(82855), UNRECOGNIZED(-1), ; /** * * * <pre> * A value indicating that the enum field is not set. * </pre> * * <code>UNDEFINED_TRAFFIC_CLASS = 0;</code> */ public static final int UNDEFINED_TRAFFIC_CLASS_VALUE = 0; /** * * * <pre> * Traffic Class 1, corresponding to DSCP ranges (0-7) 000xxx. * </pre> * * <code>TC1 = 82850;</code> */ public static final int TC1_VALUE = 82850; /** * * * <pre> * Traffic Class 2, corresponding to DSCP ranges (8-15) 001xxx. * </pre> * * <code>TC2 = 82851;</code> */ public static final int TC2_VALUE = 82851; /** * * * <pre> * Traffic Class 3, corresponding to DSCP ranges (16-23) 010xxx. * </pre> * * <code>TC3 = 82852;</code> */ public static final int TC3_VALUE = 82852; /** * * * <pre> * Traffic Class 4, corresponding to DSCP ranges (24-31) 011xxx. * </pre> * * <code>TC4 = 82853;</code> */ public static final int TC4_VALUE = 82853; /** * * * <pre> * Traffic Class 5, corresponding to DSCP ranges (32-47) 10xxxx. * </pre> * * <code>TC5 = 82854;</code> */ public static final int TC5_VALUE = 82854; /** * * * <pre> * Traffic Class 6, corresponding to DSCP ranges (48-63) 11xxxx. * </pre> * * <code>TC6 = 82855;</code> */ public static final int TC6_VALUE = 82855; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
* @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static TrafficClass valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static TrafficClass forNumber(int value) { switch (value) { case 0: return UNDEFINED_TRAFFIC_CLASS; case 82850: return TC1; case 82851: return TC2; case 82852: return TC3; case 82853: return TC4; case 82854: return TC5; case 82855: return TC6; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<TrafficClass> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<TrafficClass> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<TrafficClass>() { public TrafficClass findValueByNumber(int number) { return TrafficClass.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage .getDescriptor() .getEnumTypes() .get(0); } private static final TrafficClass[] VALUES = values(); public static TrafficClass valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private TrafficClass(int value) { this.value = 
value; } // @@protoc_insertion_point(enum_scope:google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage.TrafficClass) } private int bitField0_; public static final int PERCENTAGE_FIELD_NUMBER = 151909018; private int percentage_ = 0; /** * * * <pre> * Bandwidth percentage for a specific traffic class. * </pre> * * <code>optional uint32 percentage = 151909018;</code> * * @return Whether the percentage field is set. */ @java.lang.Override public boolean hasPercentage() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Bandwidth percentage for a specific traffic class. * </pre> * * <code>optional uint32 percentage = 151909018;</code> * * @return The percentage. */ @java.lang.Override public int getPercentage() { return percentage_; } public static final int TRAFFIC_CLASS_FIELD_NUMBER = 198180022; @SuppressWarnings("serial") private volatile java.lang.Object trafficClass_ = ""; /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @return Whether the trafficClass field is set. */ @java.lang.Override public boolean hasTrafficClass() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @return The trafficClass. */ @java.lang.Override public java.lang.String getTrafficClass() { java.lang.Object ref = trafficClass_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); trafficClass_ = s; return s; } } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. 
* Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @return The bytes for trafficClass. */ @java.lang.Override public com.google.protobuf.ByteString getTrafficClassBytes() { java.lang.Object ref = trafficClass_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); trafficClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeUInt32(151909018, percentage_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 198180022, trafficClass_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeUInt32Size(151909018, percentage_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(198180022, trafficClass_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage)) { return super.equals(obj); } com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage other = 
(com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage) obj; if (hasPercentage() != other.hasPercentage()) return false; if (hasPercentage()) { if (getPercentage() != other.getPercentage()) return false; } if (hasTrafficClass() != other.hasTrafficClass()) return false; if (hasTrafficClass()) { if (!getTrafficClass().equals(other.getTrafficClass())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPercentage()) { hash = (37 * hash) + PERCENTAGE_FIELD_NUMBER; hash = (53 * hash) + getPercentage(); } if (hasTrafficClass()) { hash = (37 * hash) + TRAFFIC_CLASS_FIELD_NUMBER; hash = (53 * hash) + getTrafficClass().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage parseFrom(com.google.protobuf.CodedInputStream input) 
throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1
          .InterconnectApplicationAwareInterconnectBandwidthPercentage
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Standard generated builder factories: all construction funnels through
  // DEFAULT_INSTANCE.toBuilder() so the descriptor machinery is initialized.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Nothing to merge when this is the default instance; otherwise copy this
    // message's fields into the new Builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Specify bandwidth percentages [1-100] for various traffic classes in
   * BandwidthPercentagePolicy. The sum of all percentages must equal 100. All
   * traffic classes must have a percentage value specified.
* </pre> * * Protobuf type {@code * google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage) com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InterconnectApplicationAwareInterconnectBandwidthPercentage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InterconnectApplicationAwareInterconnectBandwidthPercentage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage.class, com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage.Builder.class); } // Construct using // com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; percentage_ = 0; trafficClass_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InterconnectApplicationAwareInterconnectBandwidthPercentage_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage getDefaultInstanceForType() { return 
com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage .getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage build() { com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage buildPartial() { com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage result = new com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.percentage_ = percentage_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.trafficClass_ = trafficClass_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, 
value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage) { return mergeFrom( (com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage other) { if (other == com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage .getDefaultInstance()) return this; if (other.hasPercentage()) { setPercentage(other.getPercentage()); } if (other.hasTrafficClass()) { trafficClass_ = other.trafficClass_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 1215272144: { percentage_ = input.readUInt32(); bitField0_ |= 0x00000001; break; } // case 1215272144 case 1585440178: { trafficClass_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 1585440178 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int percentage_; /** * * * <pre> * Bandwidth percentage for a specific traffic class. * </pre> * * <code>optional uint32 percentage = 151909018;</code> * * @return Whether the percentage field is set. */ @java.lang.Override public boolean hasPercentage() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Bandwidth percentage for a specific traffic class. * </pre> * * <code>optional uint32 percentage = 151909018;</code> * * @return The percentage. */ @java.lang.Override public int getPercentage() { return percentage_; } /** * * * <pre> * Bandwidth percentage for a specific traffic class. * </pre> * * <code>optional uint32 percentage = 151909018;</code> * * @param value The percentage to set. * @return This builder for chaining. */ public Builder setPercentage(int value) { percentage_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Bandwidth percentage for a specific traffic class. * </pre> * * <code>optional uint32 percentage = 151909018;</code> * * @return This builder for chaining. */ public Builder clearPercentage() { bitField0_ = (bitField0_ & ~0x00000001); percentage_ = 0; onChanged(); return this; } private java.lang.Object trafficClass_ = ""; /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @return Whether the trafficClass field is set. */ public boolean hasTrafficClass() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @return The trafficClass. 
*/ public java.lang.String getTrafficClass() { java.lang.Object ref = trafficClass_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); trafficClass_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @return The bytes for trafficClass. */ public com.google.protobuf.ByteString getTrafficClassBytes() { java.lang.Object ref = trafficClass_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); trafficClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @param value The trafficClass to set. * @return This builder for chaining. */ public Builder setTrafficClass(java.lang.String value) { if (value == null) { throw new NullPointerException(); } trafficClass_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. * </pre> * * <code>optional string traffic_class = 198180022;</code> * * @return This builder for chaining. */ public Builder clearTrafficClass() { trafficClass_ = getDefaultInstance().getTrafficClass(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * TrafficClass whose bandwidth percentage is being specified. * Check the TrafficClass enum for the list of possible values. 
* </pre> * * <code>optional string traffic_class = 198180022;</code> * * @param value The bytes for trafficClass to set. * @return This builder for chaining. */ public Builder setTrafficClassBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); trafficClass_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage) private static final com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage(); } public static com.google.cloud.compute.v1 .InterconnectApplicationAwareInterconnectBandwidthPercentage getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser< InterconnectApplicationAwareInterconnectBandwidthPercentage> PARSER = new com.google.protobuf.AbstractParser< InterconnectApplicationAwareInterconnectBandwidthPercentage>() { @java.lang.Override public InterconnectApplicationAwareInterconnectBandwidthPercentage parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) {
              // Re-throw with the partially-built message attached so callers
              // can inspect whatever fields parsed before the failure.
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Wrap plain I/O failures in the protobuf-specific exception type,
              // again preserving the partial message.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };

  // Singleton parser shared by all instances of this message type.
  public static com.google.protobuf.Parser<
          InterconnectApplicationAwareInterconnectBandwidthPercentage>
      parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<InterconnectApplicationAwareInterconnectBandwidthPercentage>
      getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.InterconnectApplicationAwareInterconnectBandwidthPercentage
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}